From 8d91228f03f35c04305bf33d8817f21ef379776a Mon Sep 17 00:00:00 2001
From: Svavar Konradsson <svavarkonn@gmail.com>
Date: Tue, 27 Jun 2023 13:33:31 +0000
Subject: [PATCH] minor markdown fix in integration.md

---
 docs/assignments/week08.md            |   4 +++-
 docs/final-project/integration.md     |   2 +-
 public/assignments/week08.html        |  17 ++++++++++++++++-
 public/final-project/integration.html |   2 +-
 public/search/search_index.json       |   2 +-
 public/sitemap.xml.gz                 | Bin 217 -> 217 bytes
 6 files changed, 22 insertions(+), 5 deletions(-)

diff --git a/docs/assignments/week08.md b/docs/assignments/week08.md
index 7dc1e1be..90d97b50 100644
--- a/docs/assignments/week08.md
+++ b/docs/assignments/week08.md
@@ -147,11 +147,13 @@ Then I tried the super thin 0.01 inch flat end mill, and I must admit that I for
 
 ![Milling test 0.01 inch](images/week08/test-010.jpg){: style="width:100%"}*I broke two 0.01 inch milling bits trying to mill this test file.*
 
+### Eventual success with fine traces
+
 I waited until the [final project](https://fabacademy.org/2023/labs/isafjordur/students/svavar-konradsson/assignments/week18.html#what-has-worked-what-hasnt) to try the 0.01 inch end mill again, then at the very slow speed of 0.1 mm/s. It worked for an hour and then broke in the middle of the night. I documented my frustration in my [final project video](https://fabacademy.org/2023/labs/isafjordur/students/svavar-konradsson/final-project/presentation.html) and in my final project presentation, Neil Gershenfeld mentioned that everything has to be perfect for this milling bit to work. You have to plane the wasteboard, clean the machine, everything has to be just right. And I think I also made the mistake of having it mill all the traces, instead of just around the ICs with the smallest pads.
 
 ![V-bit](https://fabacademy.org/2023/labs/isafjordur/students/svavar-konradsson/assignments/images/week18/beautiful_traces.jpg){: style="width:100%"}*In the end I was able to mill the finest traces on my final project board with a V-bit. Then I cleared the whole board with a 1/64th inch flat end mill and milled the holes and outline with a 1/32 inch flat end mill.*
 
-Here is an assembled robot joint running a PID control loop:
+Here is the assembled robot joint running a PID control loop:
 
 <video controls width=100%>
       <source src="https://fabacademy.org/2023/labs/isafjordur/students/svavar-konradsson/final-project/images/pid_control.mp4" type="video/mp4">
diff --git a/docs/final-project/integration.md b/docs/final-project/integration.md
index 9264c490..93dae631 100644
--- a/docs/final-project/integration.md
+++ b/docs/final-project/integration.md
@@ -24,7 +24,7 @@ The night before my final project presentation, I assembled one robot joint and
 
 <video controls width=100%>
       <source src="images/pid_control.mp4" type="video/mp4">
-</video>
+</video>*Phew, it works!*
 
 
 <style>
diff --git a/public/assignments/week08.html b/public/assignments/week08.html
index e2a57bbe..a43fd899 100644
--- a/public/assignments/week08.html
+++ b/public/assignments/week08.html
@@ -696,6 +696,13 @@
     0.01 inch bit
   </a>
   
+</li>
+        
+          <li class="md-nav__item">
+  <a href="#eventual-success-with-fine-traces" class="md-nav__link">
+    Eventual success with fine traces
+  </a>
+  
 </li>
         
       </ul>
@@ -951,6 +958,13 @@
     0.01 inch bit
   </a>
   
+</li>
+        
+          <li class="md-nav__item">
+  <a href="#eventual-success-with-fine-traces" class="md-nav__link">
+    Eventual success with fine traces
+  </a>
+  
 </li>
         
       </ul>
@@ -1068,9 +1082,10 @@
 <p>Then I tried the super thin 0.01 inch flat end mill, and I must admit that I forgot to change the milling speed. So the first attempt was at a fast pace of 4 mm/s. The end mill broke immediately. Then I tried again at a slow speed of 0.5 mm/s and the same cut depth 0.1 mm. It also broke quite quickly. This was frustrating.</p>
 <p><img alt="0.01 inch test Fab Modules" src="images/week08/test-010-modules.jpg" style="width:100%" /><em>There are more offsets, since the milling bit is thinner.</em></p>
 <p><img alt="Milling test 0.01 inch" src="images/week08/test-010.jpg" style="width:100%" /><em>I broke two 0.01 inch milling bits trying to mill this test file.</em></p>
+<h3 id="eventual-success-with-fine-traces">Eventual success with fine traces</h3>
 <p>I waited until the <a href="https://fabacademy.org/2023/labs/isafjordur/students/svavar-konradsson/assignments/week18.html#what-has-worked-what-hasnt">final project</a> to try the 0.01 inch end mill again, then at the very slow speed of 0.1 mm/s. It worked for an hour and then broke in the middle of the night. I documented my frustration in my <a href="https://fabacademy.org/2023/labs/isafjordur/students/svavar-konradsson/final-project/presentation.html">final project video</a> and in my final project presentation, Neil Gershenfeld mentioned that everything has to be perfect for this milling bit to work. You have to plane the wasteboard, clean the machine, everything has to be just right. And I think I also made the mistake of having it mill all the traces, instead of just around the ICs with the smallest pads.</p>
 <p><img alt="V-bit" src="https://fabacademy.org/2023/labs/isafjordur/students/svavar-konradsson/assignments/images/week18/beautiful_traces.jpg" style="width:100%" /><em>In the end I was able to mill the finest traces on my final project board with a V-bit. Then I cleared the whole board with a 1/64th inch flat end mill and milled the holes and outline with a 1/32 inch flat end mill.</em></p>
-<p>Here is an assembled robot joint running a PID control loop:</p>
+<p>Here is the assembled robot joint running a PID control loop:</p>
 <video controls width=100%>
       <source src="https://fabacademy.org/2023/labs/isafjordur/students/svavar-konradsson/final-project/images/pid_control.mp4" type="video/mp4">
 </video>
diff --git a/public/final-project/integration.html b/public/final-project/integration.html
index 081d240f..2cbb53fd 100644
--- a/public/final-project/integration.html
+++ b/public/final-project/integration.html
@@ -886,7 +886,7 @@
 <video controls width=100%>
       <source src="images/pid_control.mp4" type="video/mp4">
 </video>
-
+<p><em>Phew, it works!</em></p>
 <style>
   .md-content__button {
     display: none;
diff --git a/public/search/search_index.json b/public/search/search_index.json
index a3c7fe02..166f5a51 100644
--- a/public/search/search_index.json
+++ b/public/search/search_index.json
@@ -1 +1 @@
-{"config":{"indexing":"full","lang":["en"],"min_search_length":3,"prebuild_index":false,"separator":"[\\s\\-]+"},"docs":[{"location":"index.html","text":"Svavar's Fab Academy Journey Me My name is Svavar Konr\u00e1\u00f0sson . Welcome to my home on the web. Here I will document my studies at the Fab Academy , class 2023 . I have the good fortune to work at Fab Lab \u00cdsafj\u00f6r\u00f0ur , Iceland. My instructor and the head of Fab Lab \u00cdsafj\u00f6r\u00f0ur (and meme grand master) is \u00de\u00f3rarinn Bjartur Brei\u00f0fj\u00f6r\u00f0 Gunnarsson . I plan to pester him day and night for the next six months when my gadgets aren't working. What is the Fab Academy? How can I describe the Fab Academy? It's like getting an acceptance letter into Hogwarts. A vast world opens up to you full of outstanding people who, working together, can make (almost) anything. You have the support of a group of unassuming people around the world who are actually technology wizards, but also appreciate the human element in art and design. It's the only online school where I've actually gotten to know the people taking part in it. Already I've made a contribution to a technical project in Spain and I'm only just getting started in the Fab Lab network. I've made friends, laughed and cried and yet the Academy somehow only exists inside my laptop and I struggle to convey how significant this thing is to outsiders. Currently there are 2500 places like this in the world where I can now walk in and talk the same language as the people there and share stories of struggling to make things work. And the number of Fab Labs doubles every 18 months. Isn't that wild? Something's going on here. Team FMCU . See my contribution here . The Fab Academy has existed in a side reality for a long time, but it's invisible to Muggles. They have their peculiar speech and conventions that are impenetrable to outsiders. 
You Google the Fab Academy and find a cryptic website full of bare HTML links to technical resources without any explanations. Ah, my friend, but you lack the guidance and the context! Come on in. Check out the meme channel in the Mattermost chat. \u00de\u00f3rarinn, the undisputed Fab Academy meme master. See here . Image from Adri\u00e1n Torres. An introduction to modern technology as we know it might realistically be taken on in a ten-year period, but we rush through it in six months. You'd better keep up, because you need to make microcontroller boards that talk to each other- wait, no, now you must mill a 3D shape and make a casting- hold on, now it's time for web programming, but lay that to one side because now you need to stay up all night to finish your final project and present it to a hundred faces on your laptop screen that are in all time zones and climates and circumstances in the world and you are now a part of this group, which shares a traumat- I mean a transformative experience. Two out of three Icelanders taking the Fab Academy this year got sick right after their final project presentation, because the pressure had been so high. Was it worth it? Absolutely. I would hardly dare make electronics before but now I'm considering taking an electronic product to market. There is a before and an after the Fab Academy. I now know how an aluminum profile feels. You know, they don't heat the material that much, they just push it with enormous force until it deforms and takes on the shape of the die. Before I was a disorganized scatterbrain. Now I talk about designing things in spirals and documenting everything, not because I'm being forced to do it anymore but because I enjoy the process. What is happening to me? My rune This is the rune that I made for myself in grade school, made up of my initials SK. To start my Fab Academy journey, I made it digital, using Inkscape. My desk The image shows my desk when all the components for the Fab Academy arrived. 
Yay! Outside the window you can see the harbor and the mountain. In the window next to the headphones is the second thing I ever 3D printed, the Stormtrooper Buddha . My desk setup consists of three screens: Tandy 102 On the left I have a Tandy 102 laptop from 1985 that I use for writing, because it has the best keyboard I've ever used by far. This was my father's computer. He ordered a device called NADSBox that plugs into its RS232 port and allows me to put my writings onto an SD card. They're in the old .DO format (this came before .DOC, which came before .DOCX). Then I run a little converter program and voil\u00e1! I can put the text on the web. I use this computer in memory of my father and I also light a candle on my desk every day to keep him with me. ThinkPad In the middle is my ThinkPad laptop, which can become totally flat! So I milled a simple stand out of two pieces of birch plywood and lasered our logo on it. I use the laptop screen mostly for modeling and image editing. It's color calibrated. I also use it to connect to a few Raspberry Pi computers using a VNC Viewer cloud connection. Vertical monitor On the right is a vertical monitor that I use for coding, reading documents and browsing the internet. Missing from the picture is a 10 year old iPad which works great. I mainly use it to read and annotate PDF documents now, but I used it as my work computer at university for a while. I even wrote and submitted a grant application using only the on-screen keyboard! .md-content__button { display: none; }","title":"Home"},{"location":"index.html#svavars-fab-academy-journey","text":"","title":"Svavar's Fab Academy Journey   "},{"location":"index.html#me","text":"My name is Svavar Konr\u00e1\u00f0sson . Welcome to my home on the web. Here I will document my studies at the Fab Academy , class 2023 . I have the good fortune to work at Fab Lab \u00cdsafj\u00f6r\u00f0ur , Iceland. 
My instructor and the head of Fab Lab \u00cdsafj\u00f6r\u00f0ur (and meme grand master) is \u00de\u00f3rarinn Bjartur Brei\u00f0fj\u00f6r\u00f0 Gunnarsson . I plan to pester him day and night for the next six months when my gadgets aren't working.","title":"Me"},{"location":"index.html#what-is-the-fab-academy","text":"How can I describe the Fab Academy? It's like getting an acceptance letter into Hogwarts. A vast world opens up to you full of outstanding people who, working together, can make (almost) anything. You have the support of a group of unassuming people around the world who are actually technology wizards, but also appreciate the human element in art and design. It's the only online school where I've actually gotten to know the people taking part in it. Already I've made a contribution to a technical project in Spain and I'm only just getting started in the Fab Lab network. I've made friends, laughed and cried and yet the Academy somehow only exists inside my laptop and I struggle to convey how significant this thing is to outsiders. Currently there are 2500 places like this in the world where I can now walk in and talk the same language as the people there and share stories of struggling to make things work. And the number of Fab Labs doubles every 18 months. Isn't that wild? Something's going on here. Team FMCU . See my contribution here . The Fab Academy has existed in a side reality for a long time, but it's invisible to Muggles. They have their peculiar speech and conventions that are impenetrable to outsiders. You Google the Fab Academy and find a cryptic website full of bare HTML links to technical resources without any explanations. Ah, my friend, but you lack the guidance and the context! Come on in. Check out the meme channel in the Mattermost chat. \u00de\u00f3rarinn, the undisputed Fab Academy meme master. See here . Image from Adri\u00e1n Torres. 
An introduction to modern technology as we know it might realistically be taken on in a ten-year period, but we rush through it in six months. You'd better keep up, because you need to make microcontroller boards that talk to each other- wait, no, now you must mill a 3D shape and make a casting- hold on, now it's time for web programming, but lay that to one side because now you need to stay up all night to finish your final project and present it to a hundred faces on your laptop screen that are in all time zones and climates and circumstances in the world and you are now a part of this group, which shares a traumat- I mean a transformative experience. Two out of three Icelanders taking the Fab Academy this year got sick right after their final project presentation, because the pressure had been so high. Was it worth it? Absolutely. I would hardly dare make electronics before but now I'm considering taking an electronic product to market. There is a before and an after the Fab Academy. I now know how an aluminum profile feels. You know, they don't heat the material that much, they just push it with enormous force until it deforms and takes on the shape of the die. Before I was a disorganized scatterbrain. Now I talk about designing things in spirals and documenting everything, not because I'm being forced to do it anymore but because I enjoy the process. What is happening to me?","title":"What is the Fab Academy?"},{"location":"index.html#my-rune","text":"This is the rune that I made for myself in grade school, made up of my initials SK. To start my Fab Academy journey, I made it digital, using Inkscape.","title":"My rune"},{"location":"index.html#my-desk","text":"The image shows my desk when all the components for the Fab Academy arrived. Yay! Outside the window you can see the harbor and the mountain. In the window next to the headphones is the second thing I ever 3D printed, the Stormtrooper Buddha . 
My desk setup consists of three screens:","title":"My desk"},{"location":"index.html#tandy-102","text":"On the left I have a Tandy 102 laptop from 1985 that I use for writing, because it has the best keyboard I've ever used by far. This was my father's computer. He ordered a device called NADSBox that plugs into its RS232 port and allows me to put my writings onto an SD card. They're in the old .DO format (this came before .DOC, which came before .DOCX). Then I run a little converter program and voil\u00e1! I can put the text on the web. I use this computer in memory of my father and I also light a candle on my desk every day to keep him with me.","title":"Tandy 102"},{"location":"index.html#thinkpad","text":"In the middle is my ThinkPad laptop, which can become totally flat! So I milled a simple stand out of two pieces of birch plywood and lasered our logo on it. I use the laptop screen mostly for modeling and image editing. It's color calibrated. I also use it to connect to a few Raspberry Pi computers using a VNC Viewer cloud connection.","title":"ThinkPad"},{"location":"index.html#vertical-monitor","text":"On the right is a vertical monitor that I use for coding, reading documents and browsing the internet. Missing from the picture is a 10 year old iPad which works great. I mainly use it to read and annotate PDF documents now, but I used it as my work computer at university for a while. I even wrote and submitted a grant application using only the on-screen keyboard! .md-content__button { display: none; }","title":"Vertical monitor"},{"location":"about.html","text":"About me Article in Icelandic about my work and studies I'm Svavar Konr\u00e1\u00f0sson. I have a BSc in Mechanical Engineering. The only thing I have left in my Master's degree is the thesis. I'll do that after the Fab Academy :) Here's a video where I tell Neil Gershenfeld a little bit about myself during random review. 
I took part in starting Team Spark , the Icelandic Formula Student team, which designs and fabricates an electric racing car and competes against other university teams at Silverstone circuit in England every year. I did the structural design of an innovative RIB at Rafnar boatyard , a boat that is now manufactured in five countries around the world. I started a company around the design of a simple and inexpensive suspension seat for high-speed boats. I also started the first proper 3D printing service in Iceland. I've started several promising things. My problem has been bad focus and project management and letting the scope of my projects grow until it's impossible to finish them. Therefore I suspect that Project Management will prove to be the most important part of Fab Academy for me. Now I work at Fab Lab \u00cdsafj\u00f6r\u00f0ur, Iceland and I'm interested in ways to introduce students to computer-controlled machines. I want 2023 to be my year of finishing things. Note I still need to add more details to the bio. I need to figure out the way to copy text documents from my father's antiquated Tandy 102 portable computer, on which I wrote more text about myself. 
Fab Academy Student Agreement The Fab Academy is responsible for: Teaching principles and practices of digital fabrication Arranging lectures, recitations, meetings, and events for the class Evaluating and providing feedback on student work Offering clear standards for completing assignments Certifying and archiving student progress Supervising class preparation Reviewing prospective students, instructors, and labs Providing central staff and infrastructure for students, instructors, and labs Fund-raising for costs not covered by student tuition Managing and reporting on the program's finances, results, and impacts Publicizing the program Promoting a respectful environment free of harassment and discrimination Encourage a diverse, accessible, and equitable community I am a Fab Academy student, responsible for: Attending class lectures and participating in reviews Developing and documenting projects assigned to introduce and demonstrate skills Allowing the Fab Academy to share my work (with attribution) in the class for purposes compatible with its mission Honestly reporting on my work, and appropriately attributing the work of others (both human and machine) Working safely Leaving workspaces in the same (or better) condition than I found them Participating in the upkeep of my lab Ensuring that my tuition for local and central class costs is covered Following locally applicable health and safety guidance Promoting a respectful environment free of harassment and discrimination Signed by committing this file in my repository, Svavar Konr\u00e1\u00f0sson .md-content__button { display: none; }","title":"About Me"},{"location":"about.html#about-me","text":"Article in Icelandic about my work and studies","title":"About me   "},{"location":"about.html#im-svavar-konrasson","text":"I have a BSc in Mechanical Engineering. The only thing I have left in my Master's degree is the thesis. 
I'll do that after the Fab Academy :) Here's a video where I tell Neil Gershenfeld a little bit about myself during random review. I took part in starting Team Spark , the Icelandic Formula Student team, which designs and fabricates an electric racing car and competes against other university teams at Silverstone circuit in England every year. I did the structural design of an innovative RIB at Rafnar boatyard , a boat that is now manufactured in five countries around the world. I started a company around the design of a simple and inexpensive suspension seat for high-speed boats. I also started the first proper 3D printing service in Iceland. I've started several promising things. My problem has been bad focus and project management and letting the scope of my projects grow until it's impossible to finish them. Therefore I suspect that Project Management will prove to be the most important part of Fab Academy for me. Now I work at Fab Lab \u00cdsafj\u00f6r\u00f0ur, Iceland and I'm interested in ways to introduce students to computer-controlled machines. I want 2023 to be my year of finishing things. Note I still need to add more details to the bio. 
I need to figure out the way to copy text documents from my father's antiquated Tandy 102 portable computer, on which I wrote more text about myself.","title":"I'm Svavar Konr\u00e1\u00f0sson."},{"location":"about.html#fab-academy-student-agreement","text":"The Fab Academy is responsible for: Teaching principles and practices of digital fabrication Arranging lectures, recitations, meetings, and events for the class Evaluating and providing feedback on student work Offering clear standards for completing assignments Certifying and archiving student progress Supervising class preparation Reviewing prospective students, instructors, and labs Providing central staff and infrastructure for students, instructors, and labs Fund-raising for costs not covered by student tuition Managing and reporting on the program's finances, results, and impacts Publicizing the program Promoting a respectful environment free of harassment and discrimination Encourage a diverse, accessible, and equitable community I am a Fab Academy student, responsible for: Attending class lectures and participating in reviews Developing and documenting projects assigned to introduce and demonstrate skills Allowing the Fab Academy to share my work (with attribution) in the class for purposes compatible with its mission Honestly reporting on my work, and appropriately attributing the work of others (both human and machine) Working safely Leaving workspaces in the same (or better) condition than I found them Participating in the upkeep of my lab Ensuring that my tuition for local and central class costs is covered Following locally applicable health and safety guidance Promoting a respectful environment free of harassment and discrimination Signed by committing this file in my repository, Svavar Konr\u00e1\u00f0sson .md-content__button { display: none; }","title":"Fab Academy Student Agreement"},{"location":"assignments/week01.html","text":"Principles and Practices Final project sketch Here I am describing 
my final project idea to Neil Gershenfeld and the students and instructors in Fab Academy cycle 2023. Link to the video. This week, I set up an ideas page with the three ideas that I have for a final project in the Fab Academy. Check them out, they're quite fun! I ended up picking the friendly little educational robot arm called baks. It's powered by brushless servomotors and has a structure made entirely of PCBs! So the robot consists of motors and PCBs and nothing else! I may not be able to get all the way there during the Fab Academy, but I do want to make that happen eventually. I put the Student Agreement on my About page. This is a document that I sign by committing it to my repository and it states the code of conduct that I will abide by during my Fab Academy studies. Setting up this website The rest of this page is dedicated to how I set up this nice website using Material for MkDocs . The website is hosted on Gitlab and deployed automatically using this file . But you can also easily deploy an MkDocs website to GitHub Pages. I did that before starting the Fab Academy, to get a little bit of a head start. You can check out the test site . I experimented with lots of different colors, but ended up finding black and white cleaner and more elegant. I also added a dark mode that you can toggle, but removed it again when it didn't work very well with the color scheme I ended up with. I really liked trying all the features in Material for MkDocs, it's a really well designed package and superbly documented. In a video call a few months before the Fab Academy started, \u00c1rni Bj\u00f6rnsson showed M\u00f3ses and me how to set up MkDocs and Git. I've summarized all the steps here, with a few extra resources I found along the way: MkDocs setup from scratch First, I installed the VSCode editor. 
Then, as \u00c1rni Bj\u00f6rnsson suggested, I created a folder called code directly in my C: drive, so that Dropbox and OneDrive don't try to sync the folder and mess up my GitHub connection. I followed this tutorial to set up MkDocs. There are a few steps to it: It starts with installing the Python extension for VSCode . The pip package manager is included with the Python extension, but for some reason you don't always get the latest version. It may be a good idea to check the version. I opened a new terminal in the top menu in VSCode ( Terminal -> New Terminal ) and typed pip --version pip 22.3.1 from C:\\Users\\your_username\\AppData\\Local\\Programs\\Python\\Python310\\lib\\site-packages\\pip (python 3.10) and if it says it's out of date you can upgrade pip like this: pip install --upgrade pip Now it was time to install MkDocs: pip install mkdocs Then, as the tutorial suggests, I typed mkdocs --version to see if the installation went OK: mkdocs --version mkdocs, version 1.2.0 from /usr/local/lib/python3.8/site-packages/mkdocs (Python 3.8) (optional) I also installed the Material theme because it seemed nice and it includes expandable code annotations: pip install mkdocs-material Note To enable notes like this one in MkDocs, I added Admonitions to the extensions in the mkdocs.yml file: markdown_extensions : admonition Then, to create a note, start with !!! note and then indent the note text: !!! note Note text MkDocs test drive I followed this tutorial to set up a small test website and get a live preview. After installing Material for MkDocs, I made a folder called Mkdocs. Then I opened a terminal and made sure that it was in the right folder: cd C:\\code\\Mkdocs Then I simply typed mkdocs new . and that was enough to create a simple site! Well, there are a few extra steps to view the site and deploy it, but this whole process is very simple. 
Then I added the following lines to mkdocs.yml: theme : name : material This is geared towards the Material theme for MkDocs, so if you're using a different theme, the only thing you need to change is the the theme line in the mkdocs.yml file. Set up autocomplete. The tutorial suggests adding a line to settings.json, but it doesn't mention where that file is in VSCode. But it does provide a link to instructions. You go into File -> Preferences -> Settings , scroll all the way down to Yaml: Schemas, and click Edit in settings.json . Then you add the line \"https://squidfunk.github.io/mkdocs-material/schema.json\": \"mkdocs.yml\" , so in the end it looks like this: { \"workbench.colorTheme\" : \"Default Dark+\" , \"files.autoSave\" : \"afterDelay\" , \"yaml.schemas\" : { \"https://squidfunk.github.io/mkdocs-material/schema.json\" : \"mkdocs.yml\" } } OK, now we're yearning for something to happen. Type the following into the terminal: mkdocs serve Now open your browser and write localhost:8000 in the address bar. Voil\u00e1! We have a live preview for an MkDocs website! Material theme Default theme The source code for this site, written in Markdown, looks like this: # Welcome to MkDocs For full documentation visit [mkdocs.org](https://www.mkdocs.org). ## Commands * `mkdocs new [dir-name]` - Create a new project. * `mkdocs serve` - Start the live-reloading docs server. * `mkdocs build` - Build the documentation site. * `mkdocs -h` - Print help message and exit. ## Project layout mkdocs.yml # The configuration file. docs/ index.md # The documentation homepage. ... # Other markdown pages, images and other files. As you can see, Markdown is simple and readable. Writing # gives you the biggest heading, ## gives you heading 2, and so on. Put * in front of text to make a bullet point. To add a link, you do this: [mkdocs.org](https://www.mkdocs.org) and to add an image you do the same, but with an exclamation mark: ![Material theme](./images/material.png) 5. 
Finally, to build a static site, write this in the terminal: mkdocs build I tried this and got an index page that works fine. But when I clicked the Setup page (this page) I got this: Hm. Apparently you need to put the structure of the site into your mkdocs.yml file to explicitly state the site navigation. So I opened it up and added nav: - 'index.md' - 'code.md' # The code page is just a few code block tests in different languages. - 'setup.md' No, that didn't work either. After some looking around I found a solution . I added the following line to mkdocs.yml : use_directory_urls: false It works! And the first solution is unnecessary; MkDocs will infer the site navigation based on the pages you create in the docs folder. Setting up MkDocs and getting the live preview working took me an hour in the morning. Writing up how I did it took me the rest of the day. Writing this documentation was a great way to learn Markdown. I like Markdown, with one exception; I don't have the ` symbol on my keyboard. I need to use this symbol quite a lot for code snippets. I did a bit of searching and found that the shortcut Alt-96 is bound to the ` symbol. Now I use that shortcut all the time. And this page serves as my Markdown reference, when I start to forget how to format things. Note To enable code highlighting (coloring the code, similar to how it looks in the VSCode editor), I added the following lines to mkdocs.yml : markdown_extensions: - pymdownx.highlight: anchor_linenums: true - pymdownx.inlinehilite - pymdownx.snippets - pymdownx.superfences - pymdownx.details I'm not sure if all these lines are necessary, but I'm just following this tutorial . The last line comes from somewhere else, I don't remember where. Anyway, now I can make a code block by enclosing the code with ``` at the top and bottom and including the name of the language at the top: ```python # This program prints Hello, world! print('Hello, world!') ``` This results in: # This program prints Hello, world! 
print ( 'Hello, world!' ) You can use this reference to write the name of the programming language correctly at the top of your code block. One more thing, I also added markdown_extensions : - attr_list - md_in_html to add the ability to align images, add captions and mark large images for lazy loading, as per this tutorial . Customizing the theme There is a way to toggle between light and dark mode. This tutorial says that it's enough to copy the following code into mkdocs.yml : Light/dark toggle Light/dark toggle + Color change theme : palette : # Palette toggle for light mode - scheme : default toggle : icon : material/brightness-7 name : Switch to dark mode # Palette toggle for dark mode - scheme : slate toggle : icon : material/brightness-4 name : Switch to light mode theme : palette : # Palette toggle for light mode - scheme : default toggle : icon : material/brightness-7 name : Switch to dark mode primary : red accent : red # Palette toggle for dark mode - scheme : slate toggle : icon : material/brightness-4 name : Switch to light mode primary : red accent : red Source code for the content tabs above === \"Light/dark toggle\" ``` yaml theme: palette: # Palette toggle for light mode - scheme: default toggle: icon: material/brightness-7 name: Switch to dark mode # Palette toggle for dark mode - scheme: slate toggle: icon: material/brightness-4 name: Switch to light mode ``` === \"Light/dark toggle + Color change\" ``` yaml palette: # Palette toggle for light mode - scheme: default toggle: icon: material/brightness-7 name: Switch to dark mode primary: red accent: red # Palette toggle for dark mode - scheme: slate toggle: icon: material/brightness-4 name: Switch to light mode primary: red accent: red ``` Note How to enable content tabs in mkdocs.yaml : markdown_extensions : - pymdownx.superfences - pymdownx.tabbed : alternate_style : true I also discovered that if you select a command, right click and select Change All Occurrences in VSCode, you only need to 
write the new color once, instead of four times. Nice! Finally, I made a logo in Inkscape. I designed this logo in grade school, it's a kind of Icelandic rune that combines my initials S and K. Then I added two lines to mkdocs.yml to change the logo in the top left corner and also the favicon (the icon you see in the browser tab). theme : logo : images\\SK_logo.svg favicon : images\\SK_logo.svg First I wrote the path as /images/SK_logo.SVG and VSCode complained about the formatting. I found that you can right click the image and select Copy Relative Path to get the right formatting. That gave me docs\\images\\SK_logo.svg , which didn't work, but when I changed it to images\\SK_logo.svg it worked. I also enabled two navigation options: theme : features : - navigation.instant # Instant loading (page behaves like a single-page application, search persists between pages) - navigation.tabs # The pages are visible as tabs at the top instead of on the left hand side. Page source The Markdown code for the View page Source button is like this: [View page source](setup.txt){ .md-button } I'm going to put it on every page of my documentation. If you see an interesting element in the page, you can then easily see how to set it up. Pointing to a .md file doesn't work, so my workaround is to make a copy of the Markdown source file and change its extension to .txt . I made a Python script using this tutorial and put it in the MkDocs folder. The script copies all the .md files in the docs folder to another folder called textfiles and converts their extension to .txt. The View Page Source button at the bottom of each page links to its respective .txt file. The Python code has some bug, so that it only converts code.md to code.txt, but I'm happy that I was able to get that far. To change the theme for just the home page, I followed tmeuze's advice in this issue . To mkdocs.yml I added custom_dir : docs/overrides and created a docs/overrides folder. 
Then I was unsure how to set up a custom theme, so I stopped there. To enable the Github Repository button in the top right corner, I followed this example and added the following to my mkdocs.yml , just below site_name : repo_name : Github Repository repo_url : https://github.com/svavarkonn/MkDocs I added theme : features : - navigation.tracking so that the URL in the address bar changes as you scroll down the page. If you copy the URL, the page will open in the section where you were when you copied it. Might be convenient if someone wants to link to something on this site. I also added theme : features : - navigation.tabs - navigation.tabs.sticky to make the top navigation follow you as you scroll down the page. By default, an \"Edit this page\" symbol is added to the right of the headline of every page. When you click it you just get a 404 error. I followed this to remove the edit button. I just add the CSS code < style > . md-content__button { display : none ; } </ style > to the .md file of each page and voil\u00e1! The edit button disappears. To enable icons and emojis, I followed the Material for MkDocs documentation on icons and emojis and added the following to mkdocs.yml : markdown_extensions: - attr_list - pymdownx.emoji: emoji_index: !!python/name:materialx.emoji.twemoji emoji_generator: !!python/name:materialx.emoji.to_svg Now I can make faces :smile: (Hmm, apparently this isn't working anymore.) To enable keyboard keys like Ctrl + Alt + Del , I added the following to mkdocs.yml : markdown_extensions : - pymdownx.keys Now I can add keyboard keys into the text by enclosing the expression with ++, and using one + between keys. The buttons above are made by typing ++ctrl+alt+del++ . Here is the key reference. There is no way to make image captions in Markdown. This seems like a glaring omission. I used this method of putting the caption in the next line after the image tag and enclosing the caption with **. 
Like this: ![ Git discussion ]( images/hategit.PNG ) *Some sentiments about Git* The image and caption are displayed like this: Sentiments about Git The caption is inline with the image, which is not great, but the workflow is simple, so I'm keeping it. If the caption doesn't work, put it inline with the image tag. View page source I've stopped using the page source button above, which links to a text file that I need to update manually. I've instead added an icon next to the page title at the top. I got the icon from Iconify . The icon links to the page source in the Github repository. MathJax I installed MathJax by following the steps in the Material for MkDocs documentation . Now I can display beautiful equations on my website, using LaTex syntax. Git setup I cloned my Fab Academy repository on Gitlab to my computer with $ git clone https://gitlab.fabcloud.org/academany/fabacademy/2023/labs/isafjordur/students/svavar-konradsson.git Cloning into 'svavar-konradsson'... remote: Enumerating objects: 15, done. remote: Counting objects: 100% (15/15), done. remote: Compressing objects: 100% (14/14), done. remote: Total 15 (delta 1), reused 0 (delta 0), pack-reused 0 Receiving objects: 100% (15/15), 28.91 KiB | 7.23 MiB/s, done. Resolving deltas: 100% (1/1), done. Then I edited index.html a little bit and tried pushing the change to the online repo on Gitlab: git push warning: missing OAuth configuration for gitlab.fabcloud.org - see https://aka.ms/gcm/gitlab for more information remote: HTTP Basic: Access denied. The provided password or token is incorrect or your account has 2FA enabled and you must use a personal access token instead of a password. 
See https://gitlab.fabcloud.org/help/topics/git/troubleshooting_git#error-on-git-fetch-http-basic-access-denied fatal: Authentication failed for 'https://gitlab.fabcloud.org/academany/fabacademy/2023/labs/isafjordur/students/svavar-konradsson.git/' I looked up how to get an access token and my instructor \u00de\u00f3rarinn asked \u00c1rni at Fab Lab Akureyri if tokens are the thing to use. \u00c1rni recommended using an SSH key instead. I managed to generate an ssh key using this tutorial : $ ssh-keygen -t ed25519 -C \"generate an ssh key for gitlab to clone my repository\" Generating public/private ed25519 key pair. Enter file in which to save the key (/n//.ssh/id_ed25519): Enter passphrase (empty for no passphrase): Then I wasn't able to type anything as a passphrase. Then I found that I was able to type, but the Git Bash terminal just didn't show anything. I wrote a phrase twice and hit Enter. I got an SSH key. OK, I have an SSH key, but what do I do with it? And why? Why is this so complicated? I just want to upload some files to the internet. Then I found this tutorial on adding an SSH key to my Gitlab account and followed it blindly. I used $ cat ~/.ssh/id_ed25519.pub | clip to copy the contents of the SSH key file. Put it into the Gitlab account under profile -> SSH keys -> Add an SSH key. Then went into C:/code and said `git clone \"the thing I copied when I pressed clone in gitlab\"' Then I got the message: *** Please tell me who you are. Run git config --global user.email \"you@example.com\" git config --global user.name \"Your Name\" to set your account's default identity. I set the identity: PS C:\\code\\svavar-konradsson> git config --global user.email \"my@email.com\" PS C:\\code\\svavar-konradsson> git config --global user.name \"Svavar Konradsson\" and then said git clone That worked! I opened index.html, put my name into the title and saved. That appeared under Source control in Gitlab, I wrote a comment in a field and clicked Commit. 
Then a Sync button appeared and I pressed that and it pushed the site onto the online repo. Now I need to type my passphrase twice every time that I push files to the online repo. That's annoying, so I'm going to generate a new SSH key and skip the passphrase. I followed \u00c1rni Bj\u00f6rnsson documentation to generate an RSA key and put it into my Gitlab profile. Every time I made major changes in Windows Explorer; deleted lots of files and moved others, I needed to generate a new SSH key. The last one was ssh-keygen -t rsa -b 2048 cat ~/.ssh/id_rsa.pub | clip The MkDocs convention is to put the built web page into a folder called site, but Gitlab needs the web site to be in a folder called public. Finally I found the site_dir setting , which I can change in mkdocs.yml so that I can rename the site folder to public . At first I manually renamed site to public and pushed the files to the Gitlab repo. I got into trouble when the dot in front of the file .gitlab-ci.yml was erased somehow and the site wasn't deployed. My instructor \u00de\u00f3rarinn found the problem and after that the site worked. .md-content__button { display: none; }","title":"1. Principles and Practices"},{"location":"assignments/week01.html#principles-and-practices","text":"","title":"Principles and Practices   "},{"location":"assignments/week01.html#final-project-sketch","text":"Here I am describing my final project idea to Neil Gershenfeld and the students and instructors in Fab Academy cycle 2023. Link to the video. This week, I set up an ideas page with the three ideas that I have for a final project in the Fab Academy. Check them out, they're quite fun! I ended up picking the friendly little educational robot arm called baks. It's powered by brushless servomotors and has a structure made entirely of PCBs! So the robot consists of motors and PCBs and nothing else! I may not be able to get all the way there during the Fab Academy, but I do want to make that happen eventually. 
I put the Student Agreement on my About page. This is a document that I sign by committing it to my repository and it states the code of conduct that I will abide by during my Fab Academy studies.","title":"Final project sketch"},{"location":"assignments/week01.html#setting-up-this-website","text":"The rest of this page is dedicated to how I set up this nice website using Material for MkDocs . The website is hosted on Gitlab and deployed automatically using this file . But you can also easily deploy an MkDocs website to GitHub Pages. I did that before starting the Fab Academy, to get a little bit of a head start. You can check out the test site . I experimented with lots of different colors, but ended up finding black and white cleaner and more elegant. I also added a dark mode that you can toggle, but removed it again when it didn't work very well with the color scheme I ended up with. I really liked trying all the features in Material for MkDocs, it's a really well designed package and superbly documented. In a video call a few months before the Fab Academy started, \u00c1rni Bj\u00f6rnsson showed M\u00f3ses and me how to set up MkDocs and Git. I've summarized all the steps here, with a few extra resources I found along the way:","title":"Setting up this website"},{"location":"assignments/week01.html#mkdocs-setup-from-scratch","text":"First, I installed the VSCode editor. Then, as \u00c1rni Bj\u00f6rnsson suggested, I created a folder called code directly in my C: drive, so that Dropbox and OneDrive don't try to sync the folder and mess up my GitHub connection. I followed this tutorial to set up MkDocs. There are a few steps to it: It starts with installing the Python extension for VSCode . The pip package manager is included with the Python extension, but for some reason you don't always get the latest version. It may be a good idea to check the version. 
I opened a new terminal in the top menu in VSCode ( Terminal -> New Terminal ) and typed pip --version pip 22.3.1 from C:\\Users\\your_username\\AppData\\Local\\Programs\\Python\\Python310\\lib\\site-packages\\pip (python 3.10) and if it says it's out of date you can upgrade pip like this: pip install --upgrade pip Now it was time to install MkDocs: pip install mkdocs Then, as the tutorial suggests, I typed mkdocs --version to see if the installation went OK: mkdocs --version mkdocs, version 1.2.0 from /usr/local/lib/python3.8/site-packages/mkdocs (Python 3.8) (optional) I also installed the Material theme because it seemed nice and it includes expandable code annotations: pip install mkdocs-material Note To enable notes like this one in MkDocs, I added Admonitions to the extensions in the mkdocs.yml file: markdown_extensions : admonition Then, to create a note, start with !!! note and then indent the note text: !!! note Note text","title":"MkDocs setup from scratch"},{"location":"assignments/week01.html#mkdocs-test-drive","text":"I followed this tutorial to set up a small test website and get a live preview. After installing Material for MkDocs, I made a folder called Mkdocs. Then I opened a terminal and made sure that it was in the right folder: cd C:\\code\\Mkdocs Then I simply typed mkdocs new . and that was enough to create a simple site! Well, there are a few extra steps to view the site and deploy it, but this whole process is very simple. Then I added the following lines to mkdocs.yml: theme : name : material This is geared towards the Material theme for MkDocs, so if you're using a different theme, the only thing you need to change is the the theme line in the mkdocs.yml file. Set up autocomplete. The tutorial suggests adding a line to settings.json, but it doesn't mention where that file is in VSCode. But it does provide a link to instructions. 
You go into File -> Preferences -> Settings , scroll all the way down to Yaml: Schemas, and click Edit in settings.json . Then you add the line \"https://squidfunk.github.io/mkdocs-material/schema.json\": \"mkdocs.yml\" , so in the end it looks like this: { \"workbench.colorTheme\" : \"Default Dark+\" , \"files.autoSave\" : \"afterDelay\" , \"yaml.schemas\" : { \"https://squidfunk.github.io/mkdocs-material/schema.json\" : \"mkdocs.yml\" } } OK, now we're yearning for something to happen. Type the following into the terminal: mkdocs serve Now open your browser and write localhost:8000 in the address bar. Voil\u00e1! We have a live preview for an MkDocs website! Material theme Default theme The source code for this site, written in Markdown, looks like this: # Welcome to MkDocs For full documentation visit [mkdocs.org](https://www.mkdocs.org). ## Commands * `mkdocs new [dir-name]` - Create a new project. * `mkdocs serve` - Start the live-reloading docs server. * `mkdocs build` - Build the documentation site. * `mkdocs -h` - Print help message and exit. ## Project layout mkdocs.yml # The configuration file. docs/ index.md # The documentation homepage. ... # Other markdown pages, images and other files. As you can see, Markdown is simple and readable. Writing # gives you the biggest heading, ## gives you heading 2, and so on. Put * in front of text to make a bullet point. To add a link, you do this: [mkdocs.org](https://www.mkdocs.org) and to add an image you do the same, but with an exclamation mark: ![Material theme](./images/material.png) 5. Finally, to build a static site, write this in the terminal: mkdocs build I tried this and got an index page that works fine. But when I clicked the Setup page (this page) I got this: Hm. Apparently you need to put the structure of the site into your mkdocs.yml file to explicitly state the site navigation. 
So I opened it up and added nav: - 'index.md' - 'code.md' # The code page is just a few code block tests in different languages. - 'setup.md' No, that didn't work either. After some looking around I found a solution . I added the following line to mkdocs.yml : use_directory_urls: false It works! And the first solution is unnecessary; MkDocs will infer the site navigation based on the pages you create in the docs folder. Setting up MkDocs and getting the live preview working took me an hour in the morning. Writing up how I did it took me the rest of the day. Writing this documentation was a great way to learn Markdown. I like Markdown, with one exception; I don't have the ` symbol on my keyboard. I need to use this symbol quite a lot for code snippets. I did a bit of searching and found that the shortcut Alt-96 is bound to the ` symbol. Now I use that shortcut all the time. And this page serves as my Markdown reference, when I start to forget how to format things. Note To enable code highlighting (coloring the code, similar to how it looks in the VSCode editor), I added the following lines to mkdocs.yml : markdown_extensions: - pymdownx.highlight: anchor_linenums: true - pymdownx.inlinehilite - pymdownx.snippets - pymdownx.superfences - pymdownx.details I'm not sure if all these lines are necessary, but I'm just following this tutorial . The last line comes from somewhere else, I don't remember where. Anyway, now I can make a code block by enclosing the code with ``` at the top and bottom and including the name of the language at the top: ```python # This program prints Hello, world! print('Hello, world!') ``` This results in: # This program prints Hello, world! print ( 'Hello, world!' ) You can use this reference to write the name of the programming language correctly at the top of your code block. 
One more thing, I also added markdown_extensions : - attr_list - md_in_html to add the ability to align images, add captions and mark large images for lazy loading, as per this tutorial .","title":"MkDocs test drive"},{"location":"assignments/week01.html#customizing-the-theme","text":"There is a way to toggle between light and dark mode. This tutorial says that it's enough to copy the following code into mkdocs.yml : Light/dark toggle Light/dark toggle + Color change theme : palette : # Palette toggle for light mode - scheme : default toggle : icon : material/brightness-7 name : Switch to dark mode # Palette toggle for dark mode - scheme : slate toggle : icon : material/brightness-4 name : Switch to light mode theme : palette : # Palette toggle for light mode - scheme : default toggle : icon : material/brightness-7 name : Switch to dark mode primary : red accent : red # Palette toggle for dark mode - scheme : slate toggle : icon : material/brightness-4 name : Switch to light mode primary : red accent : red Source code for the content tabs above === \"Light/dark toggle\" ``` yaml theme: palette: # Palette toggle for light mode - scheme: default toggle: icon: material/brightness-7 name: Switch to dark mode # Palette toggle for dark mode - scheme: slate toggle: icon: material/brightness-4 name: Switch to light mode ``` === \"Light/dark toggle + Color change\" ``` yaml palette: # Palette toggle for light mode - scheme: default toggle: icon: material/brightness-7 name: Switch to dark mode primary: red accent: red # Palette toggle for dark mode - scheme: slate toggle: icon: material/brightness-4 name: Switch to light mode primary: red accent: red ``` Note How to enable content tabs in mkdocs.yaml : markdown_extensions : - pymdownx.superfences - pymdownx.tabbed : alternate_style : true I also discovered that if you select a command, right click and select Change All Occurrences in VSCode, you only need to write the new color once, instead of four times. Nice! 
Finally, I made a logo in Inkscape. I designed this logo in grade school, it's a kind of Icelandic rune that combines my initials S and K. Then I added two lines to mkdocs.yml to change the logo in the top left corner and also the favicon (the icon you see in the browser tab). theme : logo : images\\SK_logo.svg favicon : images\\SK_logo.svg First I wrote the path as /images/SK_logo.SVG and VSCode complained about the formatting. I found that you can right click the image and select Copy Relative Path to get the right formatting. That gave me docs\\images\\SK_logo.svg , which didn't work, but when I changed it to images\\SK_logo.svg it worked. I also enabled two navigation options: theme : features : - navigation.instant # Instant loading (page behaves like a single-page application, search persists between pages) - navigation.tabs # The pages are visible as tabs at the top instead of on the left hand side.","title":"Customizing the theme"},{"location":"assignments/week01.html#page-source","text":"The Markdown code for the View page Source button is like this: [View page source](setup.txt){ .md-button } I'm going to put it on every page of my documentation. If you see an interesting element in the page, you can then easily see how to set it up. Pointing to a .md file doesn't work, so my workaround is to make a copy of the Markdown source file and change its extension to .txt . I made a Python script using this tutorial and put it in the MkDocs folder. The script copies all the .md files in the docs folder to another folder called textfiles and converts their extension to .txt. The View Page Source button at the bottom of each page links to its respective .txt file. The Python code has some bug, so that it only converts code.md to code.txt, but I'm happy that I was able to get that far. To change the theme for just the home page, I followed tmeuze's advice in this issue . To mkdocs.yml I added custom_dir : docs/overrides and created a docs/overrides folder. 
Then I was unsure how to set up a custom theme, so I stopped there. To enable the Github Repository button in the top right corner, I followed this example and added the following to my mkdocs.yml , just below site_name : repo_name : Github Repository repo_url : https://github.com/svavarkonn/MkDocs I added theme : features : - navigation.tracking so that the URL in the address bar changes as you scroll down the page. If you copy the URL, the page will open in the section where you were when you copied it. Might be convenient if someone wants to link to something on this site. I also added theme : features : - navigation.tabs - navigation.tabs.sticky to make the top navigation follow you as you scroll down the page. By default, an \"Edit this page\" symbol is added to the right of the headline of every page. When you click it you just get a 404 error. I followed this to remove the edit button. I just add the CSS code < style > . md-content__button { display : none ; } </ style > to the .md file of each page and voil\u00e1! The edit button disappears. To enable icons and emojis, I followed the Material for MkDocs documentation on icons and emojis and added the following to mkdocs.yml : markdown_extensions: - attr_list - pymdownx.emoji: emoji_index: !!python/name:materialx.emoji.twemoji emoji_generator: !!python/name:materialx.emoji.to_svg Now I can make faces :smile: (Hmm, apparently this isn't working anymore.) To enable keyboard keys like Ctrl + Alt + Del , I added the following to mkdocs.yml : markdown_extensions : - pymdownx.keys Now I can add keyboard keys into the text by enclosing the expression with ++, and using one + between keys. The buttons above are made by typing ++ctrl+alt+del++ . Here is the key reference. There is no way to make image captions in Markdown. This seems like a glaring omission. I used this method of putting the caption in the next line after the image tag and enclosing the caption with **. 
Like this: ![ Git discussion ]( images/hategit.PNG ) *Some sentiments about Git* The image and caption are displayed like this: Sentiments about Git The caption is inline with the image, which is not great, but the workflow is simple, so I'm keeping it. If the caption doesn't work, put it inline with the image tag. View page source I've stopped using the page source button above, which links to a text file that I need to update manually. I've instead added an icon next to the page title at the top. I got the icon from Iconify . The icon links to the page source in the Github repository.","title":"Page source"},{"location":"assignments/week01.html#mathjax","text":"I installed MathJax by following the steps in the Material for MkDocs documentation . Now I can display beautiful equations on my website, using LaTex syntax.","title":"MathJax"},{"location":"assignments/week01.html#git-setup","text":"I cloned my Fab Academy repository on Gitlab to my computer with $ git clone https://gitlab.fabcloud.org/academany/fabacademy/2023/labs/isafjordur/students/svavar-konradsson.git Cloning into 'svavar-konradsson'... remote: Enumerating objects: 15, done. remote: Counting objects: 100% (15/15), done. remote: Compressing objects: 100% (14/14), done. remote: Total 15 (delta 1), reused 0 (delta 0), pack-reused 0 Receiving objects: 100% (15/15), 28.91 KiB | 7.23 MiB/s, done. Resolving deltas: 100% (1/1), done. Then I edited index.html a little bit and tried pushing the change to the online repo on Gitlab: git push warning: missing OAuth configuration for gitlab.fabcloud.org - see https://aka.ms/gcm/gitlab for more information remote: HTTP Basic: Access denied. The provided password or token is incorrect or your account has 2FA enabled and you must use a personal access token instead of a password. 
See https://gitlab.fabcloud.org/help/topics/git/troubleshooting_git#error-on-git-fetch-http-basic-access-denied fatal: Authentication failed for 'https://gitlab.fabcloud.org/academany/fabacademy/2023/labs/isafjordur/students/svavar-konradsson.git/' I looked up how to get an access token and my instructor \u00de\u00f3rarinn asked \u00c1rni at Fab Lab Akureyri if tokens are the thing to use. \u00c1rni recommended using an SSH key instead. I managed to generate an ssh key using this tutorial : $ ssh-keygen -t ed25519 -C \"generate an ssh key for gitlab to clone my repository\" Generating public/private ed25519 key pair. Enter file in which to save the key (/n//.ssh/id_ed25519): Enter passphrase (empty for no passphrase): Then I wasn't able to type anything as a passphrase. Then I found that I was able to type, but the Git Bash terminal just didn't show anything. I wrote a phrase twice and hit Enter. I got an SSH key. OK, I have an SSH key, but what do I do with it? And why? Why is this so complicated? I just want to upload some files to the internet. Then I found this tutorial on adding an SSH key to my Gitlab account and followed it blindly. I used $ cat ~/.ssh/id_ed25519.pub | clip to copy the contents of the SSH key file. Put it into the Gitlab account under profile -> SSH keys -> Add an SSH key. Then went into C:/code and said `git clone \"the thing I copied when I pressed clone in gitlab\"' Then I got the message: *** Please tell me who you are. Run git config --global user.email \"you@example.com\" git config --global user.name \"Your Name\" to set your account's default identity. I set the identity: PS C:\\code\\svavar-konradsson> git config --global user.email \"my@email.com\" PS C:\\code\\svavar-konradsson> git config --global user.name \"Svavar Konradsson\" and then said git clone That worked! I opened index.html, put my name into the title and saved. That appeared under Source control in Gitlab, I wrote a comment in a field and clicked Commit. 
Then a Sync button appeared and I pressed that and it pushed the site onto the online repo. Now I need to type my passphrase twice every time that I push files to the online repo. That's annoying, so I'm going to generate a new SSH key and skip the passphrase. I followed \u00c1rni Bj\u00f6rnsson documentation to generate an RSA key and put it into my Gitlab profile. Every time I made major changes in Windows Explorer; deleted lots of files and moved others, I needed to generate a new SSH key. The last one was ssh-keygen -t rsa -b 2048 cat ~/.ssh/id_rsa.pub | clip The MkDocs convention is to put the built web page into a folder called site, but Gitlab needs the web site to be in a folder called public. Finally I found the site_dir setting , which I can change in mkdocs.yml so that I can rename the site folder to public . At first I manually renamed site to public and pushed the files to the Gitlab repo. I got into trouble when the dot in front of the file .gitlab-ci.yml was erased somehow and the site wasn't deployed. My instructor \u00de\u00f3rarinn found the problem and after that the site worked. .md-content__button { display: none; }","title":"Git setup"},{"location":"assignments/week02.html","text":"Computer-Aided Design Raster image editing My instructor \u00de\u00f3rarinn gave me a quick demo of how to isolate an object in an image and make the background transparent. That will probably come in handy in many situations. We'll use Photopea , a free online (and downloadable) photo editor that closely resembles Photoshop. You can even go through Photoshop tutorials in Photopea. Let's start by taking a picture of a roll of tape with flat lighting, and we'll try to avoid having shadows. I'll use the magic wand to select pixels with the a similar color as the pixel under the mouse. Then I'll invert the selection and finally I'll make a mask. Next I will use the brush tool to remove the rest of the background. 
I can hold Alt and the right mouse button to adjust the size and softness of the brush. Now I brushed away the rest of the background. But sometimes I accidentally brush away part of the object. Tip from \u00de\u00f3rarinn: If you brush away too much, you can press X and switch to the other color in the mask. Then you can brush the object back into the picture. Finally, it can be good to add a color fill layer. It can make it easier to see spots that you missed with the brush. You need to drag the color layer below the image layer in the menu on the right. Then the color is behind the image. Missed a bit! Raster image compression To compress the images for the web I installed ImageMagick for Windows from the website. The installation included FFmpeg. I couldn't find any info on the website on how to get started (for someone who doesn't use command line tools), but then I came across Aaron Logan's Fab Academy site from 2022. He recommended following along with this video and that did the trick for me. It was useful to see the trick of writing cmd in the address bar of the folder that includes the images, and then the Windows Command Prompt opens in that directory. Failure I entered magick convert 2023-02-02 15_22_45-Photopea _ Online Photo Editor.png -resize 40% -quality 80 output2.jpg convert: unable to open image '2023-02-02': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image '15_22_45-Photopea': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image '15_22_45-Photopea': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image '_': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image '_': No such file or directory @ error/blob.c/OpenBlob/3569. 
convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image 'Online': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image 'Online': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image 'Photo': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image 'Photo': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image 'Editor.png': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image 'Editor.png': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no images defined `output2.jpg' @ error/convert.c/ConvertImageCommand/3342. I guess ImageMagick didn't like the file name that the Greenshot screen grabbing tool generates. My instructor \u00de\u00f3rarinn recommended Greenshot to me, it's lightweight and because it freezes the screen, you can grab screenshots where your mouse shows up. I tried renaming one of the files to 1.png and then the ImageMagick compression command worked: C:\\code\\FabAcademyImagesUncompressed\\week02>magick convert 1.png -resize 40% -quality 80 output1.jpg But I wanted to convert a whole folder of images with one command. This batch resize tutorial showed my how to set the width of the resized image while keeping the original aspect ratio. Now I put all the uncompressed images for the week in one folder and run magick mogrify -resize 560 *.png to resize all the images in the folder to be 560 pixels wide. I didn't even have to rename them this time. Then I run magick mogrify -format jpg *.png to convert all the images in the folder to JPEG format. Be aware that mogrify doesn't copy the images but changes the originals. 
So I just make copies of them first and put them all in one folder before running the mogrify . Here's the mogrify documentation . To resize and compress single images as I go along, I use magick convert image.png -resize 560 image.jpg Here is the finished product: 3D modeling in Blender Since I can generally get Fusion 360 to do what I want it to, I decided to learn a little bit about Blender. I've been missing a way to edit STL meshes and model organic stuff, so here we go. Right after you click Download, this Youtube tutorial series appears on the Blender website. So I got started and acquainted myself with the viewport navigation. Remember that you can press F to go to full screen on Youtube. Rotate by pressing and holding the XYZ axes in the image. You rotate by holding down the scroll button on the mouse, just like in SolidWorks. So that's convenient. There's also a set of XYZ axes in the top right corner that you can grab and rotate like the Autodesk ViewCube. You can also click top, side and front views. Nice. Clicking and holding the little hand button next to the axes moves the center of rotation. And you zoom by scrolling the wheel. The little camera symbol next to the axes lets you look through the camera that you've put into the scene (you can also press 0 on the number pad). Someone in the Youtube comments mentioned that F12 renders the camera view. So 0 and then F12 is a quick way to render the model. Next to the camera icon is a grid symbol, which switches between orthographic mode (good for modeling) and perspective mode (good for rendering realistic images). It's good to keep an eye on the status bar on the bottom, which shows you the context you're working in at the moment. Ctrl + Space is a convenient way to maximize any viewport or panel in the interface, and then you can press Ctrl + Space again to return to the default setup: Default interface (left) and Ctrl + Space to maximize side panel (right). 
There are several tabs at the top that represent different workspaces. In each workspace, the panels in the Blender interface have been set up for that particular task: All the viewports and panels can be resized. To choose a different panel, click the symbol with the down arrow in its top left corner: In the Layout workspace, pressing T will open commonly used tools like Move and Scale, and pressing ++N++ will open info about the selected object, tool-specific options and view options. Left menu is toggled with T , right menu is toggled with N . Notice the little tabs on the right side of the right menu called Item, Tool and View. In the View tab, you can edit the position of the 3D cursor, which acts as the spawning point for new objects and you can do things like pivot objects around it. In the Layout workspace, the Timeline below the viewport controls the playback of the animation. You can scroll to zoom in and out and hold the scroll wheel to pan. You can define start and end points and add key frames. Warning DON'T MAKE TUTORIALS In our weekly local Fab Academy meeting in Iceland, I mentioned how long the documentation was taking and that I wasn't getting anywhere with Blender. My instructor \u00de\u00f3rarinn said that I shouldn't be making a tutorial on how to use Blender but rather summarize what I did, what was interesting and then link to the tutorial that I used. That's a relief. The rest of this page will be more concise and interesting. I ended up quitting the official Blender tutorial after the third video and starting the doughnut tutorial by Blender Guru on Youtube. I remember things much better when I have a goal in mind and just use the tools I need to achieve it. I followed videos 1 through 6. I haven't modeled anything organic before, so it was interesting to try make the doughnut lopsided and kind of realistic. It goes against my engineering training, where you usually want things to be symmetrical. 
I liked getting to know subdivision modeling, to make smooth object using relatively few polygons. And I really liked that my 3D mouse works in Blender. The few times when I've attempted to edit a mesh, I've moved single vertices, resulting in gnarly, pointy looking objects. So I always gave up! But now that I know about proportional editing in Blender, I can select a single vertex and the vertices close to it will follow, while those further away will be affected less. This is much quicker and more effective. I love it! And thinking about my final project, the robot arm, it would be lovely to make a keypoint editor to move it, and use proportional editing to move the keypoints around the moved point, to get a smooth rolloff. I also want to remember the Alt + S shortcut to make the faces move directly outward (normal to the faces). I still have some way to go before I'll be confident in my mesh editing skills. Here's my first attempt to make the icing on the doughnut droop: Side view looks good, top view reveals what's actually going on. After some backtracking, the icing started to look good by direct editing of the vertices. Next I tried a bit of sculpting. That means using brushes to edit the mesh. I was able to make drops in the icing by sculpting it. Rendering in Blender I tried adjusting the camera view and the light to get a good result. I tried bot the Eevee real-time rendering engine and the more realistic, but much slower, Cycles ray-tracing rendering engine. The rendering was quite slow until I enabled the CUDA graphics card in the laptop. Final result The doughnut Blender file is 6.8 MB, so I won't include it here. Make your own doughnut instead, it takes about 2 hours and then you'll be able to model cool stuff in Blender. Blender simulation I also went through a really quick tutorial on cloth simulation. Here is the result: It's fascinating! There are doors opening for me left and right! Blender is a pretty incredible piece of sofware. 
The Blender file is 20.2 MB, so I won't include it here. You can make your own in ten minutes plus maybe 30 minutes of render time. 2D design in Inkscape I've started using Inkscape a bit since I started working at Fab Lab \u00cdsafj\u00f6r\u00f0ur. But I haven't used the Clone tool before. Here's a little experiment with cloning a rectangle: I didn't save the Inkscape file, it wasn't very interesting. 2D gear design in Fusion 360 \\[ c_{pitch} = width_{tooth} \\] Antimony modeler I set up a Raspberry Pi in the case that I need to use Linux during the course. I used this tutorial to enable cloud connection. That way I can connect to the Raspberry Pi from outside its network. When I connected remotely for the first time, the screen resolution was tiny because the Raspberry Pi wasn't connected to a monitor. I followed this tutorial to hardcode the screen resolution into the boot config text file. I tried to build Matt Keeter's Antimony modeler on the Raspberry Pi, but without luck. Failure sudo apt install git build-essential libpng-dev python3-dev libboost-all-dev libgl1-mesa-dev lemon flex qt5-default ninja-build cmake Reading package lists... Done Building dependency tree... Done Reading state information... Done Package qt5-default is not available, but is referred to by another package. This may mean that the package is missing, has been obsoleted, or is only available from another source E: Package 'qt5-default' has no installation candidate I tried to install qt5 instead of qt5-default, but it didn't work. Failure sudo apt install git build-essential qt5 Reading package lists... Done Building dependency tree... Done Reading state information... Done E: Unable to locate package qt5 I don't know where to go from there. Kokopelli modeler I also tried to build Matt Keeter's Kokopelli from source on the Raspberry Pi, but also without luck: Failure ``` sudo apt-get install python python-dev python-pip gcc g++ libpng12-dev make bash cmake Reading package lists... 
Done Building dependency tree... Done Reading state information... Done Note, selecting 'python-is-python2' instead of 'python' Note, selecting 'python-dev-is-python2' instead of 'python-dev' Package libpng12-dev is not available, but is referred to by another package. his may mean that the package is missing, has been obsoleted, or is only available from another source Package python-pip is not available, but is referred to by another package. This may mean that the package is missing, has been obsoleted, or is only available from another source However the following packages replace it: python3-pip E: Package 'python-pip' has no installation candidate E: Package 'libpng12-dev' has no installation candidate ``` Voxel modeling I tried the MagicaVoxel program without looking at a tutorial, and made this figure: I'm not sure how to make anything useful with this modeling method. Maybe I'll think of something later. Here's the voxel guy I made: Download voxel guy Solvespace parametric 3D CAD I went through a Solvespace tutorial and quite liked the experience. I like the dark 8-bit look of it. I was also surprised to find that my 3D mouse works in Solvespace. The program is lightweight and modeling seems quick, once you've memorized a few keyboard shortcuts and familiarized yourself with how the sketch constraints work. In the time that it took Fusion 360 to open, I saved the bracket and exported it as STEP, STL and a triangle mesh with a Three.js html viewer. You can open the 3D model in the browser below! Open bracket 3D model in browser Solvespace was written by Jonathan Westhues and he's made other impressive things too. This tube joint pattern generator would have been a lifesaver when we were building the steel tube spaceframe for the first and second Team Spark electric racing cars back in 2011 and 2012. Solvespace was maintained for a few years by M-Labs. M-Labs wrote the Three.js export feature, among others. 
Jonathan says himself that Solvespace's NURBS operations are not as good as OpenCASCADE's but they're much smaller. The constraint solver is a remarkable achievement though, and it works well. Jonathan originally made a 2D CAD program which was superseded by Solvespace, but the paper he wrote on the sketch solver is an interesting read. The Solvespace solver library is the solver behind CAD Sketcher in Blender. The Solvespace feature list mentions G-code export with cutter radius compensation and path traced with mechanism, exportable into a spreadsheet. These two are interesting. The next thing I want to try in Solvespace is to make a movable assembly. For me the killer feature in Solvespace is the Three.js export. The in-browser Three.js model even has the Solvespace look! The file includes the whole Three.js library, which means that the file for this simple bracket is 0.7 MB. So if you want to display more than one model they will take up a lot of space. In that case you may want to export only the js model geometries from Solvespace and then manually link them to the three.js file. The bracket model geometry is only 52 KB. Here's the Solvespace model for download: Download Solvespace bracket Rhino3D Rhino can do pretty much everything. A full license costs a thousand euros and you keep that version of Rhino for life. Even better, students and educators can get a license for 200 euros. But first, I'm going to get the trial version, which lasts for 90 days (all of the Fab Academy). I've wanted to try Rhino for quite some time, but all those unfamiliar tools and menus are a bit intimidating. I know solid extrude must be there, but where? I didn't like the official Getting Started tutorials very much, because the first videos just show you things but not making you do anything in the software. So I went to Youtube and found a 40 minute introduction to Rhino for architecture students. I followed along for 17 minutes. 
Selecting a surface with Ctrl + Shift left mouse in the first image and dragging the black dot on the red axis. That leads to the extrusion in the second image. I learned to make lines, curves, surfaces, solids and how to manipulate them.Now that I've turned on the Gumball tool, I can just select a surface and extrude it by pulling on the dot on one of the Gumball tool axes. Nice! In the above picture I'm rotating a surface, which changes the whole shape. Rhino seems to have many more tools than Fusion 360, so it's interesting for making complicated shapes. I especially like the ability to grab any point, line or surface and move and rotate them with the Gumball tool. That's a really quick way to make interesting shapes that I would find difficult to model in Fusion 360. But I still haven't learned how to model precise things in Rhino with dimensions and constraints. Here's the Rhino file I made (it's just nonsense like you see in the images): Download Rhino model Rhino + Grasshopper I went through a quick tutorial and made a nice Voronoi pattern! I really like the possibilities in Grasshopper. I've wanted to try it for some time. And I like the simplicity of the Voronoi method, you just make a line midway between every point and its next neighbor, and then trim the lines. A random distribution of points results in a cellular-like pattern. Here's the Rhino file with the extruded Voronoi surface.: Download Rhino + Grasshopper Voronoi model FreeCAD After watching this video , I got excited about learning FreeCAD, but I'm 20 minutes in to a tutorial and I've only learned how to enable dark mode, and I've discovered that my 3D mouse doesn't work well with the program. Or maybe I haven't found the right settings. Update: Now I've started modeling and I'm starting to like FreeCAD. It's incredibly capable. I only stuck with it because Neil Gershenfeld is so adamant that it's a good tool and that his Fab Academy students should try it. 
The feature tree is very similar to Inventor and SolidWorks. You can go back in time and change features that control features later in the tree. I could get used to this. Hold on! There's an HTML export option! And it uses native WebGL, without the Three.js library on top of it. And the bracket model is a tiny 60 KB, including the viewer! Look! Open FreeCAD bracket in browser The HTML file is human-readable, so I could easily go into it and change the background gradient colors. The bracket itself still has a yellowish color on the bottom, but this is good enough for now. Open FreeCAD bracket with greytone background Here's the FreeCAD bracket model: Download FreeCAD bracket model Update: I also tried setting up a parametric model based on a spreadsheet. I followed this tutorial . Here's the model that includes a few configurations: Download configurable FreeCAD cube I would also like to try Python scripting in FreeCAD when I have the time. On free and open source engineering software I must admit that I've been prejudiced against free and open source versions of the engineering software that I've been using. If it's free there must be something wrong with it. I've assumed that it must be missing lots of features and that it must have a bad user interface and be riddled with bugs. Not so! And there are features in the free software that are not found in paid software at any price. Autodesk and Dassault Syst\u00e9mes, the makers of the CAD software I use most, have thousands of employees. FreeCAD is developed by like three people and it can do finite element analysis! How is this possible? Because of the ecosystem of open-source packages that get integrated into FreeCAD, like the Open SCAD workbench, for example. And the open nature of the software ensures that those who are interested can learn everything about it, down to the lowest level, and improve it and make their own versions of it. 
This is similar to the original purpose of published patents, to disseminate technology for everyone. It's interesting to note that an old open source project that was developed in the late 1960s is still the state of the art in structural Finite Element Analysis . Nastran is a million lines of FORTRAN code written for NASA by the company that would become MSC. Nastran is the core in the most advanced FEA solvers in Autodesk Inventor and even in more exotic and cutting-edge software like ADINA , which is used for the most difficult multiphysics problems like simulating nuclear reactors before they are built. I came across ADINA in my quest to understand shock absorbers . They are surprisingly complex. ADINA makes an FEA solver that is more advanced than the Siemens NX solver, and that is saying something. NX is arguably the most advanced CAD software in the world. Its geometry kernel is also the basis of the Onshape, Shapr3D and Altair CAD software. CADtron I didn't try Kevin Lynagh's CADtron because it hasn't been released yet, but it's an interesting gesture-based 2D CAD program based on the Solvespace library. Here's an example of a perpendicular constraint drawn with a stylus (from Kevin's video): Fusion 360 Final project mockup I made a mockup of my final project in Fusion 360. I connected the parts together with joints, so that I could move the arm realistically. This arm has six axes. I imported a few electronic components from GrabCAD and put them on the arm to show how I envision the design. I want to make the arm out of printed circuit boards. The are fiber reinforced, so they are quite stiff. If I place some rigid foam between two circuit boards I have a very stiff and light part that also contains the microcontroller and motor driver to control the joint. I haven't seen a robot arm made of PCBs anywhere, so that's what I want to make. 
My instructor \u00de\u00f3rarinn suggested that I think about designing the arm in such a way that it could also be laser cut or 3D printed, depending on the fabrication tools that people have access to. So here are three versions, rendered in the Fusion 360 rendering environment: Tan colored FR1 circuit board arm Red 3D printed arm Transparent acrylic laser cut arm Motion study I had trouble figuring out the Fusion 360 Animation environment, so I did a motion study instead. Before making the motion study, I needed to create rotational joints between the parts using the Joint operation (see in the menu above). OK, let's go back and define all the joints first, and make the base fixed to the ground, so that it doesn't rotate with the other parts. It's quite simple to select the hole on each part to rotate around and that's it, really. You may need to play around with some of the settings until you get what you want. But there aren't that many settings. All the joints that I defined automatically appear in the motion study as a colored line. I can place keypoints on those lines at certain times to create motion in those joints. Then it's just a matter of figuring out which joint is which and its extents of motion and playing around with the interface until the movement looks good. Note To capture short animated GIFs of what I'm doing in software, I use LICEcap . It's available for Windows and MacOS. It's lightweight and produces much smaller GIFs than any other screen capture program that I've used. Because of this recommendation , I used the command line tool Gifsicle to resize the GIF after recording it with LICECap . I tried double-clicking gifsicle.exe, but nothing happened. I found this Reddit discussion , where it was explained that Gifsicle is a command-line tool, so you just enter the commands. But where? I tried the same method as with ImageMagick. 
I put the GIF into the folder with Gifsicle and typed cmd Enter , which opened the Command Prompt in that folder. Then I ended up using the command gifsicle motion_study5.gif --resize 660x220 --colors 256 --optimize > motion_study5_resized.gif and that worked! The GIF went from 844 KB to 200 KB. I quite like the control interface for the motion study. Each line controls a joint. You click the line to make a keypoint and enter the position of that joint. This is more intuitive than the robot control software that I've tried. It would be nice to control the arm in this way. Someone on the internet said that Fusion 360 is not the right tool to control robot arms, and they're probably right. They recommended Blender. I've been thinking about writing a Python script for Blender that uses the animation environment to control my robot arm. Or I could try a browser-based solution . I saw something like this when searching the Fab Academy archives. Here's the robot arm Fusion 360 file, including the conveyor belt and motion study: Download robot arm mockup .md-content__button { display: none; }","title":"2. Computer-Aided Design"},{"location":"assignments/week02.html#computer-aided-design","text":"","title":"Computer-Aided Design   "},{"location":"assignments/week02.html#raster-image-editing","text":"My instructor \u00de\u00f3rarinn gave me a quick demo of how to isolate an object in an image and make the background transparent. That will probably come in handy in many situations. We'll use Photopea , a free online (and downloadable) photo editor that closely resembles Photoshop. You can even go through Photoshop tutorials in Photopea. Let's start by taking a picture of a roll of tape with flat lighting, and we'll try to avoid having shadows. I'll use the magic wand to select pixels with the a similar color as the pixel under the mouse. Then I'll invert the selection and finally I'll make a mask. Next I will use the brush tool to remove the rest of the background. 
I can hold Alt and the right mouse button to adjust the size and softness of the brush. Now I brushed away the rest of the background. But sometimes I accidentally brush away part of the object. Tip from \u00de\u00f3rarinn: If you brush away too much, you can press X and switch to the other color in the mask. Then you can brush the object back into the picture. Finally, it can be good to add a color fill layer. It can make it easier to see spots that you missed with the brush. You need to drag the color layer below the image layer in the menu on the right. Then the color is behind the image. Missed a bit! Raster image compression To compress the images for the web I installed ImageMagick for Windows from the website. The installation included FFmpeg. I couldn't find any info on the website on how to get started (for someone who doesn't use command line tools), but then I came across Aaron Logan's Fab Academy site from 2022. He recommended following along with this video and that did the trick for me. It was useful to see the trick of writing cmd in the address bar of the folder that includes the images, and then the Windows Command Prompt opens in that directory. Failure I entered magick convert 2023-02-02 15_22_45-Photopea _ Online Photo Editor.png -resize 40% -quality 80 output2.jpg convert: unable to open image '2023-02-02': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image '15_22_45-Photopea': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image '15_22_45-Photopea': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image '_': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image '_': No such file or directory @ error/blob.c/OpenBlob/3569. 
convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image 'Online': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image 'Online': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image 'Photo': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image 'Photo': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image 'Editor.png': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image 'Editor.png': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no images defined `output2.jpg' @ error/convert.c/ConvertImageCommand/3342. I guess ImageMagick didn't like the file name that the Greenshot screen grabbing tool generates. My instructor \u00de\u00f3rarinn recommended Greenshot to me, it's lightweight and because it freezes the screen, you can grab screenshots where your mouse shows up. I tried renaming one of the files to 1.png and then the ImageMagick compression command worked: C:\\code\\FabAcademyImagesUncompressed\\week02>magick convert 1.png -resize 40% -quality 80 output1.jpg But I wanted to convert a whole folder of images with one command. This batch resize tutorial showed my how to set the width of the resized image while keeping the original aspect ratio. Now I put all the uncompressed images for the week in one folder and run magick mogrify -resize 560 *.png to resize all the images in the folder to be 560 pixels wide. I didn't even have to rename them this time. Then I run magick mogrify -format jpg *.png to convert all the images in the folder to JPEG format. Be aware that mogrify doesn't copy the images but changes the originals. 
So I just make copies of them first and put them all in one folder before running the mogrify . Here's the mogrify documentation . To resize and compress single images as I go along, I use magick convert image.png -resize 560 image.jpg Here is the finished product:","title":"Raster image editing"},{"location":"assignments/week02.html#3d-modeling-in-blender","text":"Since I can generally get Fusion 360 to do what I want it to, I decided to learn a little bit about Blender. I've been missing a way to edit STL meshes and model organic stuff, so here we go. Right after you click Download, this Youtube tutorial series appears on the Blender website. So I got started and acquainted myself with the viewport navigation. Remember that you can press F to go to full screen on Youtube. Rotate by pressing and holding the XYZ axes in the image. You rotate by holding down the scroll button on the mouse, just like in SolidWorks. So that's convenient. There's also a set of XYZ axes in the top right corner that you can grab and rotate like the Autodesk ViewCube. You can also click top, side and front views. Nice. Clicking and holding the little hand button next to the axes moves the center of rotation. And you zoom by scrolling the wheel. The little camera symbol next to the axes lets you look through the camera that you've put into the scene (you can also press 0 on the number pad). Someone in the Youtube comments mentioned that F12 renders the camera view. So 0 and then F12 is a quick way to render the model. Next to the camera icon is a grid symbol, which switches between orthographic mode (good for modeling) and perspective mode (good for rendering realistic images). It's good to keep an eye on the status bar on the bottom, which shows you the context you're working in at the moment. 
Ctrl + Space is a convenient way to maximize any viewport or panel in the interface, and then you can press Ctrl + Space again to return to the default setup: Default interface (left) and Ctrl + Space to maximize side panel (right). There are several tabs at the top that represent different workspaces. In each workspace, the panels in the Blender interface have been set up for that particular task: All the viewports and panels can be resized. To choose a different panel, click the symbol with the down arrow in its top left corner: In the Layout workspace, pressing T will open commonly used tools like Move and Scale, and pressing ++N++ will open info about the selected object, tool-specific options and view options. Left menu is toggled with T , right menu is toggled with N . Notice the little tabs on the right side of the right menu called Item, Tool and View. In the View tab, you can edit the position of the 3D cursor, which acts as the spawning point for new objects and you can do things like pivot objects around it. In the Layout workspace, the Timeline below the viewport controls the playback of the animation. You can scroll to zoom in and out and hold the scroll wheel to pan. You can define start and end points and add key frames. Warning DON'T MAKE TUTORIALS In our weekly local Fab Academy meeting in Iceland, I mentioned how long the documentation was taking and that I wasn't getting anywhere with Blender. My instructor \u00de\u00f3rarinn said that I shouldn't be making a tutorial on how to use Blender but rather summarize what I did, what was interesting and then link to the tutorial that I used. That's a relief. The rest of this page will be more concise and interesting. I ended up quitting the official Blender tutorial after the third video and starting the doughnut tutorial by Blender Guru on Youtube. I remember things much better when I have a goal in mind and just use the tools I need to achieve it. I followed videos 1 through 6. 
I haven't modeled anything organic before, so it was interesting to try make the doughnut lopsided and kind of realistic. It goes against my engineering training, where you usually want things to be symmetrical. I liked getting to know subdivision modeling, to make smooth object using relatively few polygons. And I really liked that my 3D mouse works in Blender. The few times when I've attempted to edit a mesh, I've moved single vertices, resulting in gnarly, pointy looking objects. So I always gave up! But now that I know about proportional editing in Blender, I can select a single vertex and the vertices close to it will follow, while those further away will be affected less. This is much quicker and more effective. I love it! And thinking about my final project, the robot arm, it would be lovely to make a keypoint editor to move it, and use proportional editing to move the keypoints around the moved point, to get a smooth rolloff. I also want to remember the Alt + S shortcut to make the faces move directly outward (normal to the faces). I still have some way to go before I'll be confident in my mesh editing skills. Here's my first attempt to make the icing on the doughnut droop: Side view looks good, top view reveals what's actually going on. After some backtracking, the icing started to look good by direct editing of the vertices. Next I tried a bit of sculpting. That means using brushes to edit the mesh. I was able to make drops in the icing by sculpting it.","title":"3D modeling in Blender"},{"location":"assignments/week02.html#rendering-in-blender","text":"I tried adjusting the camera view and the light to get a good result. I tried bot the Eevee real-time rendering engine and the more realistic, but much slower, Cycles ray-tracing rendering engine. The rendering was quite slow until I enabled the CUDA graphics card in the laptop. Final result The doughnut Blender file is 6.8 MB, so I won't include it here. 
Make your own doughnut instead, it takes about 2 hours and then you'll be able to model cool stuff in Blender.","title":"Rendering in Blender"},{"location":"assignments/week02.html#blender-simulation","text":"I also went through a really quick tutorial on cloth simulation. Here is the result: It's fascinating! There are doors opening for me left and right! Blender is a pretty incredible piece of software. The Blender file is 20.2 MB, so I won't include it here. You can make your own in ten minutes plus maybe 30 minutes of render time.","title":"Blender simulation"},{"location":"assignments/week02.html#2d-design-in-inkscape","text":"I've started using Inkscape a bit since I started working at Fab Lab \u00cdsafj\u00f6r\u00f0ur. But I haven't used the Clone tool before. Here's a little experiment with cloning a rectangle: I didn't save the Inkscape file, it wasn't very interesting.","title":"2D design in Inkscape"},{"location":"assignments/week02.html#2d-gear-design-in-fusion-360","text":"\\[ c_{pitch} = width_{tooth} \\]","title":"2D gear design in Fusion 360"},{"location":"assignments/week02.html#antimony-modeler","text":"I set up a Raspberry Pi in the case that I need to use Linux during the course. I used this tutorial to enable cloud connection. That way I can connect to the Raspberry Pi from outside its network. When I connected remotely for the first time, the screen resolution was tiny because the Raspberry Pi wasn't connected to a monitor. I followed this tutorial to hardcode the screen resolution into the boot config text file. I tried to build Matt Keeter's Antimony modeler on the Raspberry Pi, but without luck. Failure sudo apt install git build-essential libpng-dev python3-dev libboost-all-dev libgl1-mesa-dev lemon flex qt5-default ninja-build cmake Reading package lists... Done Building dependency tree... Done Reading state information... Done Package qt5-default is not available, but is referred to by another package. 
This may mean that the package is missing, has been obsoleted, or is only available from another source E: Package 'qt5-default' has no installation candidate I tried to install qt5 instead of qt5-default, but it didn't work. Failure sudo apt install git build-essential qt5 Reading package lists... Done Building dependency tree... Done Reading state information... Done E: Unable to locate package qt5 I don't know where to go from there.","title":"Antimony modeler"},{"location":"assignments/week02.html#kokopelli-modeler","text":"I also tried to build Matt Keeter's Kokopelli from source on the Raspberry Pi, but also without luck: Failure ``` sudo apt-get install python python-dev python-pip gcc g++ libpng12-dev make bash cmake Reading package lists... Done Building dependency tree... Done Reading state information... Done Note, selecting 'python-is-python2' instead of 'python' Note, selecting 'python-dev-is-python2' instead of 'python-dev' Package libpng12-dev is not available, but is referred to by another package. This may mean that the package is missing, has been obsoleted, or is only available from another source Package python-pip is not available, but is referred to by another package. This may mean that the package is missing, has been obsoleted, or is only available from another source However the following packages replace it: python3-pip E: Package 'python-pip' has no installation candidate E: Package 'libpng12-dev' has no installation candidate ```","title":"Kokopelli modeler"},{"location":"assignments/week02.html#voxel-modeling","text":"I tried the MagicaVoxel program without looking at a tutorial, and made this figure: I'm not sure how to make anything useful with this modeling method. Maybe I'll think of something later. Here's the voxel guy I made: Download voxel guy","title":"Voxel modeling"},{"location":"assignments/week02.html#solvespace-parametric-3d-cad","text":"I went through a Solvespace tutorial and quite liked the experience. 
I like the dark 8-bit look of it. I was also surprised to find that my 3D mouse works in Solvespace. The program is lightweight and modeling seems quick, once you've memorized a few keyboard shortcuts and familiarized yourself with how the sketch constraints work. In the time that it took Fusion 360 to open, I saved the bracket and exported it as STEP, STL and a triangle mesh with a Three.js html viewer. You can open the 3D model in the browser below! Open bracket 3D model in browser Solvespace was written by Jonathan Westhues and he's made other impressive things too. This tube joint pattern generator would have been a lifesaver when we were building the steel tube spaceframe for the first and second Team Spark electric racing cars back in 2011 and 2012. Solvespace was maintained for a few years by M-Labs. M-Labs wrote the Three.js export feature, among others. Jonathan says himself that Solvespace's NURBS operations are not as good as OpenCASCADE's but they're much smaller. The constraint solver is a remarkable achievement though, and it works well. Jonathan originally made a 2D CAD program which was superseded by Solvespace, but the paper he wrote on the sketch solver is an interesting read. The Solvespace solver library is the solver behind CAD Sketcher in Blender. The Solvespace feature list mentions G-code export with cutter radius compensation and path traced with mechanism, exportable into a spreadsheet. These two are interesting. The next thing I want to try in Solvespace is to make a movable assembly. For me the killer feature in Solvespace is the Three.js export. The in-browser Three.js model even has the Solvespace look! The file includes the whole Three.js library, which means that the file for this simple bracket is 0.7 MB. So if you want to display more than one model they will take up a lot of space. In that case you may want to export only the js model geometries from Solvespace and then manually link them to the three.js file. 
The bracket model geometry is only 52 KB. Here's the Solvespace model for download: Download Solvespace bracket","title":"Solvespace parametric 3D CAD"},{"location":"assignments/week02.html#rhino3d","text":"Rhino can do pretty much everything. A full license costs a thousand euros and you keep that version of Rhino for life. Even better, students and educators can get a license for 200 euros. But first, I'm going to get the trial version, which lasts for 90 days (all of the Fab Academy). I've wanted to try Rhino for quite some time, but all those unfamiliar tools and menus are a bit intimidating. I know solid extrude must be there, but where? I didn't like the official Getting Started tutorials very much, because the first videos just show you things but not making you do anything in the software. So I went to Youtube and found a 40 minute introduction to Rhino for architecture students. I followed along for 17 minutes. Selecting a surface with Ctrl + Shift left mouse in the first image and dragging the black dot on the red axis. That leads to the extrusion in the second image. I learned to make lines, curves, surfaces, solids and how to manipulate them. Now that I've turned on the Gumball tool, I can just select a surface and extrude it by pulling on the dot on one of the Gumball tool axes. Nice! In the above picture I'm rotating a surface, which changes the whole shape. Rhino seems to have many more tools than Fusion 360, so it's interesting for making complicated shapes. I especially like the ability to grab any point, line or surface and move and rotate them with the Gumball tool. That's a really quick way to make interesting shapes that I would find difficult to model in Fusion 360. But I still haven't learned how to model precise things in Rhino with dimensions and constraints. 
Here's the Rhino file I made (it's just nonsense like you see in the images): Download Rhino model","title":"Rhino3D"},{"location":"assignments/week02.html#rhino-grasshopper","text":"I went through a quick tutorial and made a nice Voronoi pattern! I really like the possibilities in Grasshopper. I've wanted to try it for some time. And I like the simplicity of the Voronoi method, you just make a line midway between every point and its next neighbor, and then trim the lines. A random distribution of points results in a cellular-like pattern. Here's the Rhino file with the extruded Voronoi surface: Download Rhino + Grasshopper Voronoi model","title":"Rhino + Grasshopper"},{"location":"assignments/week02.html#freecad","text":"After watching this video , I got excited about learning FreeCAD, but I'm 20 minutes into a tutorial and I've only learned how to enable dark mode, and I've discovered that my 3D mouse doesn't work well with the program. Or maybe I haven't found the right settings. Update: Now I've started modeling and I'm starting to like FreeCAD. It's incredibly capable. I only stuck with it because Neil Gershenfeld is so adamant that it's a good tool and that his Fab Academy students should try it. The feature tree is very similar to Inventor and SolidWorks. You can go back in time and change features that control features later in the tree. I could get used to this. Hold on! There's an HTML export option! And it uses native WebGL, without the Three.js library on top of it. And the bracket model is a tiny 60 KB, including the viewer! Look! Open FreeCAD bracket in browser The HTML file is human-readable, so I could easily go into it and change the background gradient colors. The bracket itself still has a yellowish color on the bottom, but this is good enough for now. Open FreeCAD bracket with greytone background Here's the FreeCAD bracket model: Download FreeCAD bracket model Update: I also tried setting up a parametric model based on a spreadsheet. 
I followed this tutorial . Here's the model that includes a few configurations: Download configurable FreeCAD cube I would also like to try Python scripting in FreeCAD when I have the time.","title":"FreeCAD"},{"location":"assignments/week02.html#on-free-and-open-source-engineering-software","text":"I must admit that I've been prejudiced against free and open source versions of the engineering software that I've been using. If it's free there must be something wrong with it. I've assumed that it must be missing lots of features and that it must have a bad user interface and be riddled with bugs. Not so! And there are features in the free software that are not found in paid software at any price. Autodesk and Dassault Syst\u00e9mes, the makers of the CAD software I use most, have thousands of employees. FreeCAD is developed by like three people and it can do finite element analysis! How is this possible? Because of the ecosystem of open-source packages that get integrated into FreeCAD, like the Open SCAD workbench, for example. And the open nature of the software ensures that those who are interested can learn everything about it, down to the lowest level, and improve it and make their own versions of it. This is similar to the original purpose of published patents, to disseminate technology for everyone. It's interesting to note that an old open source project that was developed in the late 1960s is still the state of the art in structural Finite Element Analysis . Nastran is a million lines of FORTRAN code written for NASA by the company that would become MSC. Nastran is the core in the most advanced FEA solvers in Autodesk Inventor and even in more exotic and cutting-edge software like ADINA , which is used for the most difficult multiphysics problems like simulating nuclear reactors before they are built. I came across ADINA in my quest to understand shock absorbers . They are surprisingly complex. 
ADINA makes an FEA solver that is more advanced than the Siemens NX solver, and that is saying something. NX is arguably the most advanced CAD software in the world. Its geometry kernel is also the basis of the Onshape, Shapr3D and Altair CAD software.","title":"On free and open source engineering software"},{"location":"assignments/week02.html#cadtron","text":"I didn't try Kevin Lynagh's CADtron because it hasn't been released yet, but it's an interesting gesture-based 2D CAD program based on the Solvespace library. Here's an example of a perpendicular constraint drawn with a stylus (from Kevin's video):","title":"CADtron"},{"location":"assignments/week02.html#fusion-360","text":"","title":"Fusion 360"},{"location":"assignments/week02.html#final-project-mockup","text":"I made a mockup of my final project in Fusion 360. I connected the parts together with joints, so that I could move the arm realistically. This arm has six axes. I imported a few electronic components from GrabCAD and put them on the arm to show how I envision the design. I want to make the arm out of printed circuit boards. They are fiber reinforced, so they are quite stiff. If I place some rigid foam between two circuit boards I have a very stiff and light part that also contains the microcontroller and motor driver to control the joint. I haven't seen a robot arm made of PCBs anywhere, so that's what I want to make. My instructor \u00de\u00f3rarinn suggested that I think about designing the arm in such a way that it could also be laser cut or 3D printed, depending on the fabrication tools that people have access to. So here are three versions, rendered in the Fusion 360 rendering environment: Tan colored FR1 circuit board arm Red 3D printed arm Transparent acrylic laser cut arm","title":"Final project mockup"},{"location":"assignments/week02.html#motion-study","text":"I had trouble figuring out the Fusion 360 Animation environment, so I did a motion study instead. 
Before making the motion study, I needed to create rotational joints between the parts using the Joint operation (see in the menu above). OK, let's go back and define all the joints first, and make the base fixed to the ground, so that it doesn't rotate with the other parts. It's quite simple to select the hole on each part to rotate around and that's it, really. You may need to play around with some of the settings until you get what you want. But there aren't that many settings. All the joints that I defined automatically appear in the motion study as a colored line. I can place keypoints on those lines at certain times to create motion in those joints. Then it's just a matter of figuring out which joint is which and its extents of motion and playing around with the interface until the movement looks good. Note To capture short animated GIFs of what I'm doing in software, I use LICEcap . It's available for Windows and MacOS. It's lightweight and produces much smaller GIFs than any other screen capture program that I've used. Because of this recommendation , I used the command line tool Gifsicle to resize the GIF after recording it with LICECap . I tried double-clicking gifsicle.exe, but nothing happened. I found this Reddit discussion , where it was explained that Gifsicle is a command-line tool, so you just enter the commands. But where? I tried the same method as with ImageMagick. I put the GIF into the folder with Gifsicle and typed cmd Enter , which opened the Command Prompt in that folder. Then I ended up using the command gifsicle motion_study5.gif --resize 660x220 --colors 256 --optimize > motion_study5_resized.gif and that worked! The GIF went from 844 KB to 200 KB. I quite like the control interface for the motion study. Each line controls a joint. You click the line to make a keypoint and enter the position of that joint. This is more intuitive than the robot control software that I've tried. It would be nice to control the arm in this way. 
Someone on the internet said that Fusion 360 is not the right tool to control robot arms, and they're probably right. They recommended Blender. I've been thinking about writing a Python script for Blender that uses the animation environment to control my robot arm. Or I could try a browser-based solution . I saw something like this when searching the Fab Academy archives. Here's the robot arm Fusion 360 file, including the conveyor belt and motion study: Download robot arm mockup .md-content__button { display: none; }","title":"Motion study"},{"location":"assignments/week03.html","text":"Computer-Controlled Cutting Parametric construction kit I wanted to make a minimal parametric construction kit that was made up of only one piece. You can see the design taking shape in the hand-drawn sketches below as I was thinking about the design specifications. When I had decided on the specs, the design was fully defined. The piece can have no other shape. I wanted four pins and four pockets, so that defined the shape of the piece. The plan was to have the dimensions of the pins and pockets equal on all three axes, so that the pieces can be assembled every which way. This ultimately means that all the dimensions in the 2D sketch are controlled by the material thickness. I made a sketch in FreeCAD and set up a spreadsheet inside the part file. In the spreadsheet I defined two main parameters that I use to control the design; the material thickness and the kerf. There I am editing the value of the kerf: I made two test pieces (see the image at the top of the page) and decided to make the hole in the middle a star instead of a square. That way you can assemble the pieces at a 45\u00b0 angle, which is necessary because the angle between every pin and pocket is 45\u00b0. Then I made a series of kerf tests, which was easy because I only needed to change one parameter. 
I wound up making the fit tighter than I usually do (kerf = 0.24 mm) because the fit is tighter when the pieces are assembled in-plane than when they are perpendicular to each other. The 90\u00b0 perpendicular fits were always too loose and I didn't understand why. Kerf tests Then I thought about the way that the kerf tapers, so that more material is removed at the bottom of the material than at the top surface. This is because the laser has only one focus point, which is set at the top surface of the material. Below that point, the laser beam diverges, causing the sides of the pieces to not be completely perpendicular to the surface of the sheet. My instructor \u00de\u00f3rarinn said that there isn't really anything we can do about that, so I tried setting the focus in the middle of the material, by focusing on a 2 mm thick sheet and then cutting a 4 mm thick sheet. I was hoping that the cut would have more of an hourglass shape than a simple taper, but it didn't work out that way. Changing the focus didn't make that much of a difference, but I ended up cutting all the pieces with it set in the middle of the material. Here's a full plate of construction pieces, patterned by hand in Inkscape: I used the Epilog Helix Mini 24 laser cutter to cut my construction kit. I used 5% speed, 100% power and 5000 Hz laser frequency. The Epilog Helix Mini 24 hard at work. The construction kit was very well received by my family. Here's Hj\u00f6rtur's (1 year old) hand touching something that my wife A\u00f0albj\u00f6rg made: After an evening of play, these are the resulting shapes. Wall-E, a drone, an axle with wheels, a cube, a coaster, a giraffe, a magic wand, an I-beam, and a tool to measure the curve of your spine. It works really well, it might be a handy tool for physiotherapists. I'm holding up Wall-E, which my older son Ernir (4 years old) made. This could be a template to measure spine curvature. 
Here are the design files: Download spreadsheet-driven FreeCAD design Download DXF file (one piece) Download SVG cutting file (462 pieces) Download PDF cutting file (462 pieces) Circuit cut with vinyl cutter I wanted to cut a copper sheet and make a circuit in the vinyl cutter. This hasn't been done before at my lab. My instructor \u00de\u00f3rarinn led me through the ins and outs of the vinyl cutter. He disassembled the knife to show me the tiny bearing that needs to be cleaned and lubricated when it jams: Then he explained the way to adjust the knife. You start by retracting the knife fully and then cutting manually into the material. If nothing happens, you extend the knife a little bit and cut again. You want to cut fully through the material and scratch the backing, but you don't want to cut through the backing. Then you would damage the rubber that is underneath, and that's part of the machine. You cut tabs like you see below and then bend the material and see if they come loose. When it looks like you're getting close, you start paying attention to the ticks on the knife. A coarse adjustment is two ticks, a fine adjustment is one tick. If the material comes loose when you bend it, but you're not cutting deep into the backing, you're golden: \u00de\u00f3rarinn suggested a methodology of testing to get good cuts in the copper. First I would adjust the knife as discussed above. The next step would be to vary the cutting force and the speed until I get a good result. However, after he adjusted the blade and the force for the regular vinyl, I made a test and it cut quite well through the copper! So the systematic testing ended up being just three tests. The first test didn't work out because the copper was crumpled on top of the backing. The second test I cut with 90 grams of force and it was almost there. The third test I cut with 120 grams of force and it looked good. The Roland CAMM-1 Servo GX-24 vinyl cutter. 
I used a force setting of 120 grams to cut the copper sheet. Here's my first try at weeding copper sheet: It worked, but could be better. Neil recommended sticking everything to the final surface and then weeding. I will definitely try that next time. But this time I weeded first and then glued the copper pads to the laser cut acrylic. I forgot to add holes for pins, so I drilled them afterwards: When I had my students make sensors like these, I added the holes to the laser cutting file. The only component on the board is a 10k resistor. I grabbed a 10k resistor and soldered it to the pads. I'm surprised that the tiny pads survived my rough handling. Then I measured the resistance from one pin to the touch sensitive pad, to check if the solder connections were OK: The soldering was fine, and the next step was to open a capacitive touch sensor example sketch in the Arduino IDE and connect my new sensor to a SparkFun RedBoard: Note I used ffmpeg -i input_video -vcodec libx264 -crf 25 -preset medium -vf scale=-2:1080 -acodec libmp3lame -q:a 4 -ar 48000 -ac 2 output_video.mp4 to compress the video from my phone. Fab Lab Kannai put this into a tutorial . It works! Here's the Arduino sketch, which I modified a little bit (just commented out what I didn't need): #include <CapacitiveSensor.h> /* * CapitiveSense Library Demo Sketch * Paul Badger 2008 * Uses a high value resistor e.g. 10M between send pin and receive pin * Resistor effects sensitivity, experiment with values, 50K - 50M. Larger resistor values yield larger sensor values. 
* Receive pin is the sensor pin - try different amounts of foil/metal on this pin */ //CapacitiveSensor cs_4_2 = CapacitiveSensor(4,2); // 10M resistor between pins 4 & 2, pin 2 is sensor pin, add a wire and or foil if desired CapacitiveSensor cs_4_6 = CapacitiveSensor ( 4 , 6 ); // 10M resistor between pins 4 & 6, pin 6 is sensor pin, add a wire and or foil //CapacitiveSensor cs_4_8 = CapacitiveSensor(4,8); // 10M resistor between pins 4 & 8, pin 8 is sensor pin, add a wire and or foil int LEDpin = 13 ; void setup () { // cs_4_2.set_CS_AutocaL_Millis(0xFFFFFFFF); // turn off autocalibrate on channel 1 - just as an example Serial . begin ( 9600 ); } void loop () { long start = millis (); // long total1 = cs_4_2.capacitiveSensor(30); long total2 = cs_4_6 . capacitiveSensor ( 30 ); // long total3 = cs_4_8.capacitiveSensor(30); // Serial.print(millis() - start); // check on performance in milliseconds // Serial.print(\"\\t\"); // tab character for debug windown spacing // Serial.print(total1); // print sensor output 1 Serial . print ( \" \\t \" ); Serial . println ( total2 ); // print sensor output 2 // Serial.print(\"\\t\"); // Serial.println(total3); // print sensor output 3 delay ( 10 ); // arbitrary delay to limit data to serial port // if (total3 > 40) // { // digitalWrite(LEDpin, HIGH); // } // else // { // digitalWrite(LEDpin, LOW); // } } Here are the design files: Download sensor vinyl cutting file Download outline laser cutting file Rubber stamp For the rubber stamp engraving, I used this reference . I was able to cut through the rubber at 5% speed and 100% power with out 40W Epilog Helix laser cutter: The engraving test that looked cleanest to me was at 30% speed and 100% power. I then engraved the Fab Lab \u00cdsafj\u00f6r\u00f0ur logo with two such passes and cut out the outline: As you can see, this makes for an awful stamp. I both forgot to mirror the logo and invert it, to make it stick out. 
I haven't had time to make a proper stamp yet, but the tests look promising. Here are the design files: Download rubber test file Download failed rubber stamp logo Download failed rubber stamp outline Kerf test The missing width when I've laser cut these ten lines is exactly 2 mm. That means that the laser's kerf (or the diameter of the laser point) is \\[\\frac{2mm}{10}=\\underline{0.2mm}\\] So when I design a press-fit joint, I need to offset all lines outward by 0.1 mm. Download kerf test Raster test I used the same raster test as I did with the rubber stamp. I set the laser power to 100% and the frequency to the maximum 5000 Hz. Then I enabled Color Mapping in the Epilog Laser settings within the Print dialog in the PDF viewer. Red is 60% speed, green is 50%, blue is 40%, yellow is 30%, magenta is 20% and cyan is 10%. That goes for both rastering (top row) and cutting (bottom row). Here's how the 4 mm MDF reacted to these settings: To get a clean raster, the speed must be above 30%. To cut through the material, the speed must be 10% or slower. I usually use 5% speed to make sure that the laser cuts through. Download raster test file .md-content__button { display: none; }","title":"3. Computer-Controlled Cutting"},{"location":"assignments/week03.html#computer-controlled-cutting","text":"","title":"Computer-Controlled Cutting   "},{"location":"assignments/week03.html#parametric-construction-kit","text":"I wanted to make a minimal parametric construction kit that was made up of only one piece. You can see the design taking shape in the hand-drawn sketches below as I was thinking about the design specifications. When I had decided on the specs, the design was fully defined. The piece can have no other shape. I wanted four pins and four pockets, so that defined the shape of the piece. The plan was to have the dimensions of the pins and pockets equal on all three axes, so that the pieces can be assembled every which way. 
This ultimately means that all the dimensions in the 2D sketch are controlled by the material thickness. I made a sketch in FreeCAD and set up a spreadsheet inside the part file. In the spreadsheet I defined two main parameters that I use to control the design; the material thickness and the kerf. There I am editing the value of the kerf: I made two test pieces (see the image at the top of the page) and decided to make the hole in the middle a star instead of a square. That way you can assemble the pieces at a 45\u00b0 angle, which is necessary because the angle between every pin and pocket is 45\u00b0. Then I made a series of kerf tests, which was easy because I only needed to change one parameter. I wound up making the fit tighter than I usually do (kerf = 0.24 mm) because the fit is tighter when the pieces are assembled in-plane than when they are perpendicular to each other. The 90\u00b0 perpendicular fits were always too loose and I didn't understand why. Kerf tests Then I thought about the way that the kerf tapers, so that more material is removed at the bottom of the material than at the top surface. This is because the laser has only one focus point, which is set at the top surface of the material. Below that point, the laser beam diverges, causing the sides of the pieces to not be completely perpendicular to the surface of the sheet. My instructor \u00de\u00f3rarinn said that there isn't really anything we can do about that, so I tried setting the focus in the middle of the material, by focusing on a 2 mm thick sheet and then cutting a 4 mm thick sheet. I was hoping that the cut would have more of an hourglass shape than a simple taper, but it didn't work out that way. Changing the focus didn't make that much of a difference, but I ended up cutting all the pieces with it set in the middle of the material. Here's a full plate of construction pieces, patterned by hand in Inkscape: I used the Epilog Helix Mini 24 laser cutter to cut my construction kit. 
I used 5% speed, 100% power and 5000 Hz laser frequency. The Epilog Helix Mini 24 hard at work. The construction kit was very well received by my family. Here's Hj\u00f6rtur's (1 year old) hand touching something that my wife A\u00f0albj\u00f6rg made: After an evening of play, these are the resulting shapes. Wall-E, a drone, an axle with wheels, a cube, a coaster, a giraffe, a magic wand, an I-beam, and a tool to measure the curve of your spine. It works really well, it might be a handy tool for physiotherapists. I'm holding up Wall-E, which my older son Ernir (4 years old) made. This could be a template to measure spine curvature. Here are the design files: Download spreadsheet-driven FreeCAD design Download DXF file (one piece) Download SVG cutting file (462 pieces) Download PDF cutting file (462 pieces)","title":"Parametric construction kit"},{"location":"assignments/week03.html#circuit-cut-with-vinyl-cutter","text":"I wanted to cut a copper sheet and make a circuit in the vinyl cutter. This hasn't been done before at my lab. My instructor \u00de\u00f3rarinn led me through the ins and outs of the vinyl cutter. He disassembled the knife to show me the tiny bearing that needs to be cleaned and lubricated when it jams: Then he explained the way to adjust the knife. You start by retracting the knife fully and then cutting manually into the material. If nothing happens, you extend the knife a little bit and cut again. You want to cut fully through the material and scratch the backing, but you don't want to cut through the backing. Then you would damage the rubber that is underneath, and that's part of the machine. You cut tabs like you see below and then bend the material and see if they come loose. When it looks like you're getting close, you start paying attention to the ticks on the knife. A coarse adjustment is two ticks, a fine adjustment is one tick. 
If the material comes loose when you bend it, but you're not cutting deep into the backing, you're golden: \u00de\u00f3rarinn suggested a methodology of testing to get good cuts in the copper. First I would adjust the knife as discussed above. The next step would be to vary the cutting force and the speed until I get a good result. However, after he adjusted the blade and the force for the regular vinyl, I made a test and it cut quite well through the copper! So the systematic testing ended up being just three tests. The first test didn't work out because the copper was crumpled on top of the backing. The second test I cut with 90 grams of force and it was almost there. The third test I cut with 120 grams of force and it looked good. The Roland CAMM-1 Servo GX-24 vinyl cutter. I used a force setting of 120 grams to cut the copper sheet. Here's my first try at weeding copper sheet: It worked, but could be better. Neil recommended sticking everything to the final surface and then weeding. I will definitely try that next time. But this time I weeded first and then glued the copper pads to the laser cut acrylic. I forgot to add holes for pins, so I drilled them afterwards: When I had my students make sensors like these, I added the holes to the laser cutting file. The only component on the board is a 10k resistor. I grabbed a 10k resistor and soldered it to the pads. I'm surprised that the tiny pads survived my rough handling. Then I measured the resistance from one pin to the touch sensitive pad, to check if the solder connections were OK: The soldering was fine, and the next step was to open a capacitive touch sensor example sketch in the Arduino IDE and connect my new sensor to a SparkFun RedBoard: Note I used ffmpeg -i input_video -vcodec libx264 -crf 25 -preset medium -vf scale=-2:1080 -acodec libmp3lame -q:a 4 -ar 48000 -ac 2 output_video.mp4 to compress the video from my phone. Fab Lab Kannai put this into a tutorial . It works! 
Here's the Arduino sketch, which I modified a little bit (just commented out what I didn't need): #include <CapacitiveSensor.h> /* * CapitiveSense Library Demo Sketch * Paul Badger 2008 * Uses a high value resistor e.g. 10M between send pin and receive pin * Resistor effects sensitivity, experiment with values, 50K - 50M. Larger resistor values yield larger sensor values. * Receive pin is the sensor pin - try different amounts of foil/metal on this pin */ //CapacitiveSensor cs_4_2 = CapacitiveSensor(4,2); // 10M resistor between pins 4 & 2, pin 2 is sensor pin, add a wire and or foil if desired CapacitiveSensor cs_4_6 = CapacitiveSensor ( 4 , 6 ); // 10M resistor between pins 4 & 6, pin 6 is sensor pin, add a wire and or foil //CapacitiveSensor cs_4_8 = CapacitiveSensor(4,8); // 10M resistor between pins 4 & 8, pin 8 is sensor pin, add a wire and or foil int LEDpin = 13 ; void setup () { // cs_4_2.set_CS_AutocaL_Millis(0xFFFFFFFF); // turn off autocalibrate on channel 1 - just as an example Serial . begin ( 9600 ); } void loop () { long start = millis (); // long total1 = cs_4_2.capacitiveSensor(30); long total2 = cs_4_6 . capacitiveSensor ( 30 ); // long total3 = cs_4_8.capacitiveSensor(30); // Serial.print(millis() - start); // check on performance in milliseconds // Serial.print(\"\\t\"); // tab character for debug windown spacing // Serial.print(total1); // print sensor output 1 Serial . print ( \" \\t \" ); Serial . println ( total2 ); // print sensor output 2 // Serial.print(\"\\t\"); // Serial.println(total3); // print sensor output 3 delay ( 10 ); // arbitrary delay to limit data to serial port // if (total3 > 40) // { // digitalWrite(LEDpin, HIGH); // } // else // { // digitalWrite(LEDpin, LOW); // } } Here are the design files: Download sensor vinyl cutting file Download outline laser cutting file","title":"Circuit cut with vinyl cutter"},{"location":"assignments/week03.html#rubber-stamp","text":"For the rubber stamp engraving, I used this reference . 
I was able to cut through the rubber at 5% speed and 100% power with our 40W Epilog Helix laser cutter
This is one of the more interesting microcontrollers to me, because of the flexible Programmable Input/Output (PIO), two fast cores and you can program it in the Arduino IDE or in the friendly Python language. The datasheet also looks friendlier than datasheets for other microcontrollers. Still, that may only be the graphic design, because the actual content is cryptic. I understood like 0.5% of what I read. But I did pick up some interesting tidbits. The RP2040 has a built-in Real Time Clock. That would be useful for my wake-up mask. It has a dormant state which uses almost no power and it can be woken up by the RTC. That also sounds good for the wake-up mask. But in that case, the RTC needs to use an external clock source, which can be as slow as 1 Hz. Hold on, the RTC needs an external crystal. That's good to know. I thought you didn't need to add a crystal, but the only truly internally generated clock is the Ring Oscillator. Its speed varies with process, voltage and temperature, so it can't be used for RTC, USB communication or analog to digital conversion. When reading the RP2040 datasheet and testing the Xiao RP2040, I wondered: Does the tiny Xiao board have a crystal? I don't see one on the board. Is it underneath the metal cover with the RP2040? If it doesn't have an external oscillator, then the Real Time Clock doesn't work and I can't use it in my wake-up mask. I looked around and found the schematic for the Xiao RP2040 and there is a crystal and Flash memory. Good. They must be underneath the cover. So there is a crystal. But maybe the internal RTC in the RP2040 isn't good enough and I need to use something like the DS3231 chip . Update: My instructor \u00de\u00f3rarinn showed me a picture of what's underneath the metal cover. The picture was actually on the same page as the schematic that I had found, but I hadn't looked at the whole page. 
So here it is, and you can see the crystal marked with 12.000: I watched a video that stated that computers are horrible at division. So I was glad to see that the Raspberry Pi Foundation decided to implement a hardware division unit in the RP2040. I wondered why there was no hardware multiplier until I got to the section about the Arm Cortex M0+ cores inside the RP2040. The Arm specification has hardware multiplication built in. I watched another video that shows how you can easily overclock the Raspberry Pi Pico. Overclocking has always sounded to me as if you're going to overheat the device and damage it, or make it unstable, but the datasheet says that overclocking processors in general is usually fine. In the video, a maximum speed of 250 MHz is recommended. I was interested in the example programs provided under the Programmer's Model headings in the datasheet but I have no idea where to put them or how to run them. There are also lots of tables with registers and commands and stuff like that but I can make neither heads nor tails of them. What are they? What would I do with them? The mysteries continue. The RP2040 chip has 36 General Purpose Input/Output pins. That's quite a lot. Typically, a few of those pins are connected to an external flash memory chip, as in the Raspberry Pi Pico board. The Pi Pico board has 26 user-accessible pins. Three of them can be used for reading analog signals and converting them to digital numbers. The ADCs are officially 12 bit but the effective resolution is I think something like 10 bits because of an error in the chip design. The RP2040 has plenty of communication interfaces (SPI, UART, I2C, USB), and if you need something else, you can make it yourself with the Programmable IO blocks. It has eight two channel PWM generators (they're called slices). So can it generate 8 PWM signals, or 16? The PIO machines can always see the states of all pins. That's interesting. 
I think I remember reading that the DMA channels share a similar view. My Master's thesis advisor said that when programming in C, you get much more access to all the computer's devices and peripherals. When the University stopped paying the license for his vibration meter, he wrote a driver for it in C. That sounds like wizardry to me, and more than a little fascinating. But the C code examples in the RP2040 datasheet don't look fun to me. PIO This is the most interesting part, the Programmable IO. There are two PIO blocks in the RP2040 and each has four state machines. That means that you have eight simple, deterministic and precisely timed cores that are specialized for input and output. Each state machine has two 32 bit shift registers. Jakob Einar, a frequent guest at Fab Lab \u00cdsafj\u00f6r\u00f0ur, made a cnc plotter using old DVD drives. He said that a shift register enables a slow Arduino Uno to control all the steppers in sync. I'm curious about how a shift register works. There are two PIO blocks with four state machines each. Each state machine has: Two 32-bit shift registers (I need to find out what they do) Two 32-bit scratch registers (you can think of them as variables, they're called x and y) 4x32-bit bus FIFO in each direction or 8x32 in a single direction (data flows in and out using these and gets stored in the two variables x and y) Fractional clock divider (if you want the PIO to run slower than the main clock speed) DMA interface (to get lots of data from memory without using the processor, or put data into memory) IRQ flag set/clear/status (I think this is to alert the main processor that the PIO has finished an operation) The state machine has only nine instructions: JMP , WAIT , IN , OUT , PUSH , PULL , MOV , IRQ AND SET . Each instruction takes exactly one cycle. It's interesting to try programming at this level. I like the fact that the first three bits of each instruction is like this: JMP : 001, WAIT : 010 and so on. 
I understand that labeling system! Precise timing is what microcontrollers are good at! You can get PIO programs from the Raspberry Pi Pico PIO library, you can write them yourself or (and this is the most interesting case) you can generate them programmatically. I wouldn't know how to do that, though. All the supplied code examples are written in C. This may be a good introduction. I'm putting this here for myself to discover later. Here's some more stuff. I did find one example of using DMA in MicroPython code. The iosoft blog says that MicroPython doesn't have built-in functions to support DMA and doesn't provide a simple way to access the ADC, DMA and I/O pin registers. But there is a way to define these registers using the obscure uctypes . And then you need to be familiar with the RP2040 datasheet. Here someone has written some code based on the iosoft blog. This should be easy to try out, because it's Python code. Here's a resource with good pictures that goes into PIO + DMA. Most instructions are executed from the instruction memory, but there are a few other sources. The most versatile of these is the OUT EXEC instruction. You can use this to embed instructions in the data stream passing through the FIFO. I didn't understand the Wikipedia entry on shift registers. The Sparkfun article says that they're used to add more I/O pins to a microcontrollers. But that's not what's happening in the PIO. Apparently, you load one bit into the shift register at a time (a 1 or a 0). When a new bit enters, all the bits inside the shift register are shifted to make room for it. Then you can use the latch pin to output all the bits at the same time from the shift register. So you're turning a serial port into a parallel port. I guess this is useful for synchronized CNC movements, but how does it work inside the PIO block in the RP2040? The output of the PIO assembler is shown, and it consists of hexadecimal numbers. They look scary and incomprehensible. 
I noticed that they all have an x in them, so I asked Google why hexadecimal numbers all start with 0x. Turns out it's just to let the parser know that it's a hexadecimal base number instead of other bases. I've seen a lot of definitions and explanations, but what I really need is an example that I can run. I still don't understand how the data flows and how the operations are carried out. To see what's going on inside the chip, I probably need something like PicoReg . It's a debugger for the Raspberry Pi Pico. The advantage is that it's written entirely in Python I can set it up on a Raspberry Pi. I can wrap my head around that. I'd like to try to implement a PIO+DMA stepper controller with acceleration ramping. But I likely won't have time for that this week. V. Hunter Adams at Cornell has implemented it , but to use it you need to set up a toolchain for C. This forum post is also interesting. cleverca22 's comment on this post may also hold clues. Seemingly unrelated, but there may also be some clues in the servo easing algorithm. There are stepper control PIO programs here and here . But they don't implement ramping. And again, you need to use the C/C++ SDK. The RP2040 datasheet says that if you want to get started with PIO, a walkthrough of writing your first PIO program is in the Pico C/C++ SDK. It also covers using PIO with DMA. But I want to use Python. Or at least I want to be able to make a library for Python. Is it possible to do that in C/C++? Yes. But if you build an external C module for MicroPython, you have to build your own MicroPython firmware to put on the RP2040. There's another possibility , which I like better: An alternative approach is to use Native machine code in .mpy files which allows writing custom C code that is placed in a .mpy file, which can be imported dynamically in to a running MicroPython system without the need to recompile the main firmware. 
I flipped quickly through all the communications protocols but read PWM more carefully. If I will use brushless motors in my robot arm, I need to get to know Pulse Width Modulation. It's interesting how flexible the pin mapping on the RP2040 is, the PWM slices can use any of the GPIO pins, and so can PIO. I actually understood a part of the description of how a PWM slice works. It's a 16-bit counter that the wraps back to zero. Wrap is a command in pioasm that enables you to jump back to the beginning without using a JMP command, and it takes 0 cycles. I'm beginning to understand a little bit, even if I can't use these things yet. I guess I could DMA to push a sine wave from a lookup table to three PWM outputs to control a three-phase brushless motor. Then the main processors would be free to do other tasks. This arbitrary wave generator using PIO and DMA may be worth taking a look at when I have some time. There's a lot going on inside this $1 chip. It's like a whole city, complete with different neighborhoods connected together with highways and smaller roads to move data between places. All roads lead to the C/C++ SDK, it seems. Here are community libraries for the SDK. There are also lots of examples in the Raspberry Pi GitHub repo. I will have to jump in at some point. One question remains: What is the OSR? There is a bit of programmer humor in the datasheet, on page 359 it says that I2C is an ubiquitous serial bus first described in the Dead Sea Scrolls, and later used by Philips Semiconductor. On page 364 it is conjectured that the PIO can run DOOM with a high enough clock speed. On page 365 it says that a full 32-bit addition takes the PIO only around one minute at 125 MHz. What? So using the PIO for mathematical operations takes hundreds of millions of cycles? DMA Direct Memory Access is the other interesting feature in the RP2040. It's a memory controller that can copy data from one place to another very fast without the processor's intervention. 
Throughout the RP2040 datasheet there are mentions that this and that part of the chip has a DMA interface. It looks like it's important to learn to use DMA if you want to make things run fast. Could DMA be used to feed a PIO state machine with acceleration ramps for a stepper? I would like to learn a little bit about how DMA works and how to set it up, but it seems that it's not possible in MicroPython. The Raspberry Pi Pico C/C++ SDK instructions mention that you can use DMA by including a library called hardware_dma. Setting up a C/C++ toolchain sounds intimidating, but maybe I have to do it if I want to try using DMA. I think DMA programming is too complicated to get into for now. ADC The Analog to Digital Converter takes 96 clock cycles to make a 12-bit measurement. That's good to know. The RP2040 ADC has a few errors, most notably that the quantization error looks like a sawtooth. Also, because the wrong size of capacitor was used in one part of the ADC, there are big spikes in differential non-linearity. I won't pretend to know what that is, but it means that there will be spikes in the measured values in four places. The scaling factor for the internal temperature sensor is specified here in the datasheet. You can see it an example code in the Chip temperature section below. I looked at the Raspberry Pi Pico Python SDK and found it a bit thin. I also skimmed Getting started with Raspberry Pi Pico, which shows how to set up a C/C++ programming environment for the Pi Pico, and it looks extremely complicated. But that's where the juicy stuff is. Programming the Xiao RP2040 The Xiao RP2040 has a reset button, so it's more convenient to use in that respect than the Raspberry Pi Pico. Hello RP2040 I tried Neil Gershenfeld's Hello RP2040 Python program: I like having an RGB LED to play with. It could serve as a simple interface for the wake-up mask or the robot arm if I assign a different meaning to every color. 
Chip temperature I did this one with the Raspberry Pi Pico. Setup and programming is identical to the Xiao RP2040. I found a nice tutorial on using the temperature sensor which is built into the RP2040 chip. It's useful to make sure that the chip doesn't overheat, but it's also a good exercise in measuring an analog voltage. Since I've tried blinking the onboard LED already, I'm going to use that knowledge to modify this program to turn on the LED when the temperature crosses a threshold: from machine import ADC , Pin import time led = machine . Pin ( \"LED\" , machine . Pin . OUT ) adc = machine . ADC ( 4 ) while True : ADC_voltage = adc . read_u16 () * ( 3.3 / ( 65535 )) temperature = 27 - ( ADC_voltage - 0.706 ) / 0.001721 print ( \"Temperature: {} \u00b0C\" . format ( temperature )) if temperature > 26 : led . value ( 1 ) else : led . value ( 0 ) time . sleep_ms ( 100 ) I added the if statement and removed the temperature in Fahrenheit. You can see that you need to scale the analog measurement to get the temperature in degrees Celsius. I suspect that that every chip will give a slightly different value because of manufacturing variability. So it might be better to measure the temperature with a better sensor and put that value into the scaling factor. But since this sensor isn't meant to be super precise, we'll let it be. Interfacing with LCD screen I connected a classic 16x2 character LCD screen to the Raspberry Pi Pico, but it didn't work. I needed to use the 4-bit mode (where you connect the LCD directly to the microcontroller) because I didn't have an LCD driver chip. Aby Michael's Fab Academy site had a useful diagram with the LCD pins. The ElectronicWings diagram was even more useful. After some Googling I found that the screen needs 5V signals. The RP2040 is a 3.3V chip, so that's why I'm getting glitchy results. Its strange that it worked with a Pico in the tutorial that I used (click the link in the video description for a connection diagram). 
I'd like to try the small OLED screen that is in the Fab Lab inventory next. That one is 3-5V tolerant, and much smaller and versatile than the 16x2. And it only needs four pins. Look at that nest of wires above! The OLED will be a great improvement. Here are the files I used, you just open each of them in Thonny and save them to your Pico. The main.py file runs automatically when you power the Pico on. The other two are a library for interfacing with the LCD. Beware, this didn't work for me. Download main.py Download lcd_api.py Download gpio_lcd.py Interfacing with OLED screen I used Kevin McAleer's simple code example for the SSD1306 OLED screen. It just writes Test 1 to the screen. It was exhilarating to see the tiny letters light up on the screen on the first try! I then changed the text to something more useful, a prototype display for the Frankenstein MCU , which Fran and my instructor \u00de\u00f3rarinn are working on. The plan is to make a WiFi connected button in every Fab Lab and the staff push the button every day to show that the lab is active. The connections between labs appear on a spinning globe made in Three.js, which runs in the browser. Download oled.py Stepper control with RP2040 This one I also did with the Raspberry Pi Pico. from machine import Pin from time import sleep IN1 = Pin ( 2 , Pin . OUT ) IN2 = Pin ( 3 , Pin . OUT ) IN3 = Pin ( 4 , Pin . OUT ) IN4 = Pin ( 5 , Pin . OUT ) pins = [ IN1 , IN2 , IN3 , IN4 ] sequence = [[ 1 , 0 , 0 , 0 ],[ 0 , 1 , 0 , 0 ],[ 0 , 0 , 1 , 0 ],[ 0 , 0 , 0 , 1 ]] while True : for step in sequence : for i in range ( len ( pins )): pins [ i ] . value ( step [ i ]) sleep ( 0.01 ) Controlling the stepper is surprisingly simple. It has four phases, so you connect it to four pins on the Pico. Then you just alternate which pin is HIGH while the others are low. The stepper motor has 64 steps per rotation, but it also has a 1/64 gearing ratio, so in total it has 64 * 64 = 4096 steps per rotation. 
That's pretty good for a 3 dollar stepper! I bought a set of five 28-BYJ-48 steppers with ULN2003 drivers for $15. They're cheap and precise! But there are two drawbacks. They're quite weak. For a motor with 1:64 gearing, it's surprisingly easy to make it skip steps. Also, the gearbox introduces backlash which is much bigger than the step size. The step size is 0.09\u00b0 but the backlash seems to be a few degrees. Maybe it's possible to correct for the slop in software every time the motor changes direction. But that won't work 100% and definitely not with small motions. I wonder if these motors are a good fit for my robot arm. In the video above I changed the sleep value and then ran the program. First it was 0.1 s, then 0.01 s and finally 0.001 s. When I went below 0.01 s, the stepper stalled. It's fun to have status LEDs on all the phases. At slow speeds you can see how the phases are turned on and off. I want to have status LEDS on every output pin on every microcontroller board! It's a really handy debugging tool. I laser cut a press-fit acrylic arrow to see the motor's movement better. Since I have the 12V version of the 28BYJ-48 motors, I can run them on a 9V battery. So here's my first foray into battery-powered electronics. The Pico is still powered by a USB cable, though. I need to learn how to make a 5V regulator board, so that I can also power the Pico using the 9V battery. PIO stepper control I followed a tutorial and wrote a program that controls a stepper with PIO, without using the processor at all. 
I then modified it to include four PIO state machines that run the same program but are clocked at different frequencies: Here's the code: from machine import Pin from rp2 import PIO, StateMachine, asm_pio from time import sleep import sys @asm_pio(set_init=(PIO.OUT_LOW,) * 4) def prog(): wrap_target() set(pins, 8) [31] #8 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 4) [31] #4 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 2) [31] #2 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 1) [31] #1 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] wrap() sm0 = StateMachine(0, prog, freq=100000, set_base=Pin(2)) sm1 = StateMachine(1, prog, freq=50000, set_base=Pin(6)) sm2 = StateMachine(2, prog, freq=25000, set_base=Pin(10)) sm3 = StateMachine(3, prog, freq=12500, set_base=Pin(18)) sm0.active(1) sm1.active(1) sm2.active(1) sm3.active(1) sleep(5) sm0.active(0) sm1.active(0) sm2.active(0) sm3.active(0) sm0.exec(\"set(pins,0)\") sm1.exec(\"set(pins,0)\") sm2.exec(\"set(pins,0)\") sm3.exec(\"set(pins,0)\") nop() is no operation, and you can optionally add a delay after every command, like this: [1] . That was a delay of one clock cycle. I think 31 cycles is the maximum. Adding a Reset button The Raspberry Pi Pico has no reset button, but it resets if you connect the RUN pin to ground. So I just connected a jumper to the RUN pin on the breadboard and make the other end of the jumper touch the GND pin that is one pin over to reset the Pico. That's more convenient than unplugging the USB cable and plugging it in again. Overclocking the RP2040 I tried Chris DeHut's RP2040 overclocking video . He has lots of good stuff about the Pico. 
Here's his program which changes the clock speed a few times and measures the time it takes to do 100.000 sets of the calculations in the Do_Stuff function: ''' PICO default clock speed is 125 MHz Demo to show time to make a bunch of basic math calculations at varaious clock speeds that the PICO can handle ''' import machine import time import machine led_onboard = machine . Pin ( 25 , machine . Pin . OUT ) def Do_Stuff (): st = time . ticks_ms () Y = 0 while Y < 100000 : Y += 1 Z = 57 Z1 = Z + Y Z2 = Z - Y Z3 = Z * ( Z + Y ) #print(Y, Z1, Z2, Z3) led_onboard . value ( 0 ) #print(Y) et = time . ticks_ms () #print(et, st, et-st) return et - st cntr = 0 while cntr < 2 : #run whole test several times for observation cntr += 1 machine . freq ( 125000000 ) #set clock to 125 MHz x = machine . freq () t = Do_Stuff () print ( \" \\n @\" , x , \" time to run =\" , t , \"ms\" ) machine . freq ( 140000000 ) #set clock to 140 MHz x = machine . freq () t = Do_Stuff () print ( \" \\n @ \" , x , \" time to run =\" , t , \"ms\" ) machine . freq ( 200000000 ) #set clock to 200 MHz x = machine . freq () t = Do_Stuff () print ( \" \\n @ \" , x , \" time to run =\" , t , \"ms\" ) machine . freq ( 250000000 ) #set clock to 250 MHz x = machine . freq () t = Do_Stuff () print ( \" \\n @ \" , x , \" time to run =\" , t , \"ms\" ) machine . freq ( 125000000 ) #set clock to 125 MHz to clean things up print ( \" \\n All Done Folks!\" ) The loop runs a few times so that you can take the average of the time measurements. This is one set of measurements: @ 125000000 time to run = 1167 ms @ 140000000 time to run = 1042 ms @ 200000000 time to run = 729 ms @ 250000000 time to run = 583 ms That's a big difference by varying the clock speed, and the RP2040 datasheet says that overclocking is usually safe. I will keep this in mind when my programs are running slowly. Then I tried going a bit higher. It worked at 280 MHz, but at 290 or 300 MHz, I got no response from the microcontroller. 
So this is the best time: @ 280000000 time to run = 521 ms I think I will not go above 250 MHz. That's a lot, twice as fast as the standard RP2040 with a 12 MHz crystal, like in the Pico and the Xiao RP2040. There are instructions online on how to go up to 420 MHz and there is at least one commercial board that runs the RP2040 at 400 MHz. Custom performance test I modified the PIO program to control four steppers at different speeds, by modifying the clock dividers in each of the state machines. The state machines all run the same program. It works, and there should be no load on the main processor. The video looks the same as the four stepper video above. Now let's verify that there is no load on the main processor. I'll take Chris DeHut's 100 thousand calculation routine and put it into the PIO stepper program. Here are the results while driving four steppers at different speeds at the same time: @ 125000000 time to run = 1167 ms @ 140000000 time to run = 1042 ms @ 200000000 time to run = 729 ms @ 250000000 time to run = 584 ms Those are the same times as the first case, which had no steppers. Wow! I then increased the number of calculation loops to 300 thousand, so that we can see what happens to the steppers as we increase the main clock frequency from 125 MHz to 140, 200 and finally 250 MHz. As you can see from the video below, the steppers speed up until the fastest stepper stalls when the clock speed goes up to 250 MHz. For comparison with the PIO routine, I also tried to mix the 100k calculation code with code where the processor controls four steppers at the same time, but i couldn't get those two things to happen at the same time. But I could probably run those things on core0 and core1 with good results. Let's try, using this tutorial to learn how threads work in Python: @ 125000000 time to run = 1181 ms @ 140000000 time to run = 1053 ms @ 200000000 time to run = 734 ms @ 250000000 time to run = 587 ms It worked! My first dual-core program! 
The steppers just kept on running on core1 after the calculations finished on core0. And the calculation times are good! They're just a few milliseconds longer the 4 stepper PIO + 100k calculation routine. Here's the code: import machine import time import machine from machine import Pin from rp2 import PIO, StateMachine, asm_pio from time import sleep import sys led_onboard = machine.Pin(25, machine.Pin.OUT) @asm_pio(set_init=(PIO.OUT_LOW,) * 4) def prog(): wrap_target() set(pins, 8) [31] #8 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 4) [31] #4 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 2) [31] #2 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 1) [31] #1 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] wrap() sm0 = StateMachine(0, prog, freq=50000, set_base=Pin(2)) sm1 = StateMachine(1, prog, freq=25000, set_base=Pin(6)) sm2 = StateMachine(2, prog, freq=12500, set_base=Pin(10)) sm3 = StateMachine(3, prog, freq=6250, set_base=Pin(18)) sm0.active(1) sm1.active(1) sm2.active(1) sm3.active(1) #sleep(5) def Do_Stuff(): st = time.ticks_ms() Y = 0 while Y < 300000:+\u00f0\u00f0\u00f0\u00f0 Y += 1 Z = 57 Z1 = Z + Y Z2 = Z - Y Z3 = Z * (Z + Y) #print(Y, Z1, Z2, Z3) led_onboard.value(0) #print(Y) et = time.ticks_ms() #print(et, st, et-st) return et-st cntr = 0 while cntr < 2: #run whole test several times for observation cntr += 1 machine.freq(125000000) #set clock to 125 MHz x = machine.freq() t = Do_Stuff() print(\"\\n@\", x, \" time to run =\", t, \"ms\") machine.freq(140000000) #set clock to 140 MHz x = machine.freq() t = Do_Stuff() print(\"\\n@ \", x, \" time to run =\", t, \"ms\") machine.freq(200000000) #set clock to 200 MHz x = machine.freq() t = Do_Stuff() print(\"\\n@ \", x, \" time to run =\", t, \"ms\") machine.freq(250000000) #set clock to 250 MHz x = machine.freq() t = Do_Stuff() print(\"\\n@ \", x, \" time to run =\", t, \"ms\") 
machine.freq(125000000) #set clock to 125 MHz to clean things up print(\"\\n All Done Folks!\") sm0.active(0) sm1.active(0) sm2.active(0) sm3.active(0) sm0.exec(\"set(pins,0)\") sm1.exec(\"set(pins,0)\") sm2.exec(\"set(pins,0)\") sm3.exec(\"set(pins,0)\") Xiao SAMD21 The SAMD21 runs at 48 MHz, which is considerably slower than the RP2040. But it's easy to solder. I'll probably use it because of that. It also has a DAC output. For hobbyists, the SAMD21 can only be programmed in the Arduino IDE and CircuitPython. For professionals, you can use Microchip Studio or set up your own toolchain. So I set up the board in the Arduino IDE the way you would set up any new board: I added the proper link into \"Additional Boards Manager URLs\" in Preferences, and then I could find Seeed SAMD21 Boards in the Boards Manager and install them. It's as easy as that, and then selecting the right board (Seeeduino Xiao). The setup is covered in this tutorial . I tried connecting it to the Arduino IDE but it didn't work. Xiao ESP32 C3 The ESP32 C3 runs at 160 MHz, which is fast, but it's not possible to overclock it. But the most amazing thing is that this tiny microcontroller has WiFi and Bluetooth built in! This is my first RISC-V chip. As Neil says, RISC-V will probably take over the world. It's an open source architecture, so unlike the ARM chips that are found in microcontrollers, billions of smartphones and all the way up to supercomputers, manufacturers don't have to pay huge licensing fees to make RISC-V chips. I tried connecting it to the Arduino IDE but it didn't work. ESP-32 CAM ESP-32 CAM is a 10 dollar WiFi camera! I had to have one and try it out. It might play well with my robot arm. I followed this tutorial to set up the ESP32-CAM board. To install the ESP32 boards in the Arduino IDE, I followed that tutorial . Then I tested the board with this tutorial . I uncommented the AI-Thinker CAM definition in the setup and input my WiFi credentials in the sketch. 
When the Arduino IDE had uploaded the code, the following message was left hanging: Leaving... Hard resetting via RTS pin... But then I checked and saw that above it, the status bar said \"Done uploading\". I also found this explanation , which said that this isn't an error at all. First I saw nothing in the serial monitor. Then I removed an extra #define camera line. Still nothing. Then I switched to the 5G network, the same as my laptop is on. Then I saw a series of dots form .... but no IP address. But when I did in the opposite order from the tutorial (first pressed RESET, then removed the jumper) I got this message: ets Jul 29 2019 12:21:46 rst:0x1 (POWERON_RESET),boot:0x3 (DOWNLOAD_BOOT(UART0/UART1/SDIO_REI_REO_V2)) waiting for download ets Jul 29 2019 12:21:46 Something's happening! I removed the jumper and pressed reset again: rst:0x1 (POWERON_RESET),boot:0x13 (SPI_FAST_FLASH_BOOT) configsip: 0, SPIWP:0xee clk_drv:0x00,q_drv:0x00,d_drv:0x00,cs0_drv:0x00,hd_drv:0x00,wp_drv:0x00 mode:DIO, clock div:1 load:0x3fff0030,len:1344 load:0x40078000,len:13836 load:0x40080400,len:3608 entry 0x400805f0 . WiFi connected Camera Ready! Use 'http://192.168.1.32' to connect Success! After scrolling down and pressing \"Start stream\", I could see a live stream from the camera over WiFi to my browser! At QVGA resolution (320x240), the stream is smooth. At UXGA resolution (1600x1200), the stream stutters. SVGA (800x600) is the highest resolution where lag isn't very noticeable. That's pretty good for a microcontroller! I love it. And I find that in low light, greyscale looks a lot better than a color image. Note This time I used ffmpeg -i esp32-cam_test1.mp4 -vf scale=400:-2 -vcodec libx264 -an -crf 20 esp32-cam_test1_web.mp4 to compress the video and remove the audio as shown here . This video shows ways to control the image capture just by typing URLs into the browser. 
If you have a computer nearby at all times, you can also stream video from the ESP32-CAM to any browser in the world. And here's a guide to modifying the HTML code inside the Arduino code. It's not obvious how to do it, since the HTML has been converted to hex code in the Arduino sketch. I checked whether it's possible to overclock the ESP32, but 240 MHz seems to be the maximum clock speed. FPGA I bought an FPGA development board from Seeed Studio. It is the Runber board , which contains a Gowin FPGA. I took the plunge because it is the cheapest FPGA board I've found that still has a set of tutorials . I looked at some of the documentation and it looks intimidating. I applied for a software license on a Friday and got it in the beginning of the next week. I haven't had the time to set it up and go through a tutorial, though. .md-content__button { display: none; }","title":"4. Embedded Programming"},{"location":"assignments/week04.html#embedded-programming","text":"","title":"Embedded Programming   "},{"location":"assignments/week04.html#the-rp2040-datasheet","text":"I read the Raspberry Pi RP2040 datasheet loosely. This is one of the more interesting microcontrollers to me, because of the flexible Programmable Input/Output (PIO), two fast cores and you can program it in the Arduino IDE or in the friendly Python language. The datasheet also looks friendlier than datasheets for other microcontrollers. Still, that may only be the graphic design, because the actual content is cryptic. I understood like 0.5% of what I read. But I did pick up some interesting tidbits. The RP2040 has a built-in Real Time Clock. That would be useful for my wake-up mask. It has a dormant state which uses almost no power and it can be woken up by the RTC. That also sounds good for the wake-up mask. But in that case, the RTC needs to use an external clock source, which can be as slow as 1 Hz. Hold on, the RTC needs an external crystal. That's good to know. 
I thought you didn't need to add a crystal, but the only truly internally generated clock is the Ring Oscillator. Its speed varies with process, voltage and temperature, so it can't be used for RTC, USB communication or analog to digital conversion. When reading the RP2040 datasheet and testing the Xiao RP2040, I wondered: Does the tiny Xiao board have a crystal? I don't see one on the board. Is it underneath the metal cover with the RP2040? If it doesn't have an external oscillator, then the Real Time Clock doesn't work and I can't use it in my wake-up mask. I looked around and found the schematic for the Xiao RP2040 and there is a crystal and Flash memory. Good. They must be underneath the cover. So there is a crystal. But maybe the internal RTC in the RP2040 isn't good enough and I need to use something like the DS3231 chip . Update: My instructor \u00de\u00f3rarinn showed me a picture of what's underneath the metal cover. The picture was actually on the same page as the schematic that I had found, but I hadn't looked at the whole page. So here it is, and you can see the crystal marked with 12.000: I watched a video that stated that computers are horrible at division. So I was glad to see that the Raspberry Pi Foundation decided to implement a hardware division unit in the RP2040. I wondered why there was no hardware multiplier until I got to the section about the Arm Cortex M0+ cores inside the RP2040. The Arm specification has hardware multiplication built in. I watched another video that shows how you can easily overclock the Raspberry Pi Pico. Overclocking has always sounded to me as if you're going to overheat the device and damage it, or make it unstable, but the datasheet says that overclocking processors in general is usually fine. In the video, a maximum speed of 250 MHz is recommended. I was interested in the example programs provided under the Programmer's Model headings in the datasheet but I have no idea where to put them or how to run them. 
There are also lots of tables with registers and commands and stuff like that but I can make neither heads nor tails of them. What are they? What would I do with them? The mysteries continue. The RP2040 chip has 36 General Purpose Input/Output pins. That's quite a lot. Typically, a few of those pins are connected to an external flash memory chip, as in the Raspberry Pi Pico board. The Pi Pico board has 26 user-accessible pins. Three of them can be used for reading analog signals and converting them to digital numbers. The ADCs are officially 12 bit but the effective resolution is I think something like 10 bits because of an error in the chip design. The RP2040 has plenty of communication interfaces (SPI, UART, I2C, USB), and if you need something else, you can make it yourself with the Programmable IO blocks. It has eight two channel PWM generators (they're called slices). So can it generate 8 PWM signals, or 16? The PIO machines can always see the states of all pins. That's interesting. I think I remember reading that the DMA channels share a similar view. My Master's thesis advisor said that when programming in C, you get much more access to all the computer's devices and peripherals. When the University stopped paying the license for his vibration meter, he wrote a driver for it in C. That sounds like wizardry to me, and more than a little fascinating. But the C code examples in the RP2040 datasheet don't look fun to me.","title":"The RP2040 datasheet"},{"location":"assignments/week04.html#pio","text":"This is the most interesting part, the Programmable IO. There are two PIO blocks in the RP2040 and each has four state machines. That means that you have eight simple, deterministic and precisely timed cores that are specialized for input and output. Each state machine has two 32 bit shift registers. Jakob Einar, a frequent guest at Fab Lab \u00cdsafj\u00f6r\u00f0ur, made a cnc plotter using old DVD drives. 
He said that a shift register enables a slow Arduino Uno to control all the steppers in sync. I'm curious about how a shift register works. There are two PIO blocks with four state machines each. Each state machine has: Two 32-bit shift registers (I need to find out what they do) Two 32-bit scratch registers (you can think of them as variables, they're called x and y) 4x32-bit bus FIFO in each direction or 8x32 in a single direction (data flows in and out using these and gets stored in the two variables x and y) Fractional clock divider (if you want the PIO to run slower than the main clock speed) DMA interface (to get lots of data from memory without using the processor, or put data into memory) IRQ flag set/clear/status (I think this is to alert the main processor that the PIO has finished an operation) The state machine has only nine instructions: JMP , WAIT , IN , OUT , PUSH , PULL , MOV , IRQ AND SET . Each instruction takes exactly one cycle. It's interesting to try programming at this level. I like the fact that the first three bits of each instruction is like this: JMP : 001, WAIT : 010 and so on. I understand that labeling system! Precise timing is what microcontrollers are good at! You can get PIO programs from the Raspberry Pi Pico PIO library, you can write them yourself or (and this is the most interesting case) you can generate them programmatically. I wouldn't know how to do that, though. All the supplied code examples are written in C. This may be a good introduction. I'm putting this here for myself to discover later. Here's some more stuff. I did find one example of using DMA in MicroPython code. The iosoft blog says that MicroPython doesn't have built-in functions to support DMA and doesn't provide a simple way to access the ADC, DMA and I/O pin registers. But there is a way to define these registers using the obscure uctypes . And then you need to be familiar with the RP2040 datasheet. Here someone has written some code based on the iosoft blog. 
This should be easy to try out, because it's Python code. Here's a resource with good pictures that goes into PIO + DMA. Most instructions are executed from the instruction memory, but there are a few other sources. The most versatile of these is the OUT EXEC instruction. You can use this to embed instructions in the data stream passing through the FIFO. I didn't understand the Wikipedia entry on shift registers. The Sparkfun article says that they're used to add more I/O pins to a microcontrollers. But that's not what's happening in the PIO. Apparently, you load one bit into the shift register at a time (a 1 or a 0). When a new bit enters, all the bits inside the shift register are shifted to make room for it. Then you can use the latch pin to output all the bits at the same time from the shift register. So you're turning a serial port into a parallel port. I guess this is useful for synchronized CNC movements, but how does it work inside the PIO block in the RP2040? The output of the PIO assembler is shown, and it consists of hexadecimal numbers. They look scary and incomprehensible. I noticed that they all have an x in them, so I asked Google why hexadecimal numbers all start with 0x. Turns out it's just to let the parser know that it's a hexadecimal base number instead of other bases. I've seen a lot of definitions and explanations, but what I really need is an example that I can run. I still don't understand how the data flows and how the operations are carried out. To see what's going on inside the chip, I probably need something like PicoReg . It's a debugger for the Raspberry Pi Pico. The advantage is that it's written entirely in Python I can set it up on a Raspberry Pi. I can wrap my head around that. I'd like to try to implement a PIO+DMA stepper controller with acceleration ramping. But I likely won't have time for that this week. V. Hunter Adams at Cornell has implemented it , but to use it you need to set up a toolchain for C. 
This forum post is also interesting. cleverca22 's comment on this post may also hold clues. Seemingly unrelated, but there may also be some clues in the servo easing algorithm. There are stepper control PIO programs here and here . But they don't implement ramping. And again, you need to use the C/C++ SDK. The RP2040 datasheet says that if you want to get started with PIO, a walkthrough of writing your first PIO program is in the Pico C/C++ SDK. It also covers using PIO with DMA. But I want to use Python. Or at least I want to be able to make a library for Python. Is it possible to do that in C/C++? Yes. But if you build an external C module for MicroPython, you have to build your own MicroPython firmware to put on the RP2040. There's another possibility , which I like better: An alternative approach is to use Native machine code in .mpy files which allows writing custom C code that is placed in a .mpy file, which can be imported dynamically in to a running MicroPython system without the need to recompile the main firmware. I flipped quickly through all the communications protocols but read PWM more carefully. If I will use brushless motors in my robot arm, I need to get to know Pulse Width Modulation. It's interesting how flexible the pin mapping on the RP2040 is, the PWM slices can use any of the GPIO pins, and so can PIO. I actually understood a part of the description of how a PWM slice works. It's a 16-bit counter that the wraps back to zero. Wrap is a command in pioasm that enables you to jump back to the beginning without using a JMP command, and it takes 0 cycles. I'm beginning to understand a little bit, even if I can't use these things yet. I guess I could DMA to push a sine wave from a lookup table to three PWM outputs to control a three-phase brushless motor. Then the main processors would be free to do other tasks. This arbitrary wave generator using PIO and DMA may be worth taking a look at when I have some time. 
There's a lot going on inside this $1 chip. It's like a whole city, complete with different neighborhoods connected together with highways and smaller roads to move data between places. All roads lead to the C/C++ SDK, it seems. Here are community libraries for the SDK. There are also lots of examples in the Raspberry Pi GitHub repo. I will have to jump in at some point. One question remains: What is the OSR? There is a bit of programmer humor in the datasheet, on page 359 it says that I2C is an ubiquitous serial bus first described in the Dead Sea Scrolls, and later used by Philips Semiconductor. On page 364 it is conjectured that the PIO can run DOOM with a high enough clock speed. On page 365 it says that a full 32-bit addition takes the PIO only around one minute at 125 MHz. What? So using the PIO for mathematical operations takes hundreds of millions of cycles?","title":"PIO"},{"location":"assignments/week04.html#dma","text":"Direct Memory Access is the other interesting feature in the RP2040. It's a memory controller that can copy data from one place to another very fast without the processor's intervention. Throughout the RP2040 datasheet there are mentions that this and that part of the chip has a DMA interface. It looks like it's important to learn to use DMA if you want to make things run fast. Could DMA be used to feed a PIO state machine with acceleration ramps for a stepper? I would like to learn a little bit about how DMA works and how to set it up, but it seems that it's not possible in MicroPython. The Raspberry Pi Pico C/C++ SDK instructions mention that you can use DMA by including a library called hardware_dma. Setting up a C/C++ toolchain sounds intimidating, but maybe I have to do it if I want to try using DMA. I think DMA programming is too complicated to get into for now.","title":"DMA"},{"location":"assignments/week04.html#adc","text":"The Analog to Digital Converter takes 96 clock cycles to make a 12-bit measurement. That's good to know. 
The RP2040 ADC has a few errors, most notably that the quantization error looks like a sawtooth. Also, because the wrong size of capacitor was used in one part of the ADC, there are big spikes in differential non-linearity. I won't pretend to know what that is, but it means that there will be spikes in the measured values in four places. The scaling factor for the internal temperature sensor is specified here in the datasheet. You can see it an example code in the Chip temperature section below. I looked at the Raspberry Pi Pico Python SDK and found it a bit thin. I also skimmed Getting started with Raspberry Pi Pico, which shows how to set up a C/C++ programming environment for the Pi Pico, and it looks extremely complicated. But that's where the juicy stuff is.","title":"ADC"},{"location":"assignments/week04.html#programming-the-xiao-rp2040","text":"The Xiao RP2040 has a reset button, so it's more convenient to use in that respect than the Raspberry Pi Pico.","title":"Programming the Xiao RP2040"},{"location":"assignments/week04.html#hello-rp2040","text":"I tried Neil Gershenfeld's Hello RP2040 Python program: I like having an RGB LED to play with. It could serve as a simple interface for the wake-up mask or the robot arm if I assign a different meaning to every color.","title":"Hello RP2040"},{"location":"assignments/week04.html#chip-temperature","text":"I did this one with the Raspberry Pi Pico. Setup and programming is identical to the Xiao RP2040. I found a nice tutorial on using the temperature sensor which is built into the RP2040 chip. It's useful to make sure that the chip doesn't overheat, but it's also a good exercise in measuring an analog voltage. Since I've tried blinking the onboard LED already, I'm going to use that knowledge to modify this program to turn on the LED when the temperature crosses a threshold: from machine import ADC , Pin import time led = machine . Pin ( \"LED\" , machine . Pin . OUT ) adc = machine . 
ADC ( 4 ) while True : ADC_voltage = adc . read_u16 () * ( 3.3 / ( 65535 )) temperature = 27 - ( ADC_voltage - 0.706 ) / 0.001721 print ( \"Temperature: {} \u00b0C\" . format ( temperature )) if temperature > 26 : led . value ( 1 ) else : led . value ( 0 ) time . sleep_ms ( 100 ) I added the if statement and removed the temperature in Fahrenheit. You can see that you need to scale the analog measurement to get the temperature in degrees Celsius. I suspect that that every chip will give a slightly different value because of manufacturing variability. So it might be better to measure the temperature with a better sensor and put that value into the scaling factor. But since this sensor isn't meant to be super precise, we'll let it be.","title":"Chip temperature"},{"location":"assignments/week04.html#interfacing-with-lcd-screen","text":"I connected a classic 16x2 character LCD screen to the Raspberry Pi Pico, but it didn't work. I needed to use the 4-bit mode (where you connect the LCD directly to the microcontroller) because I didn't have an LCD driver chip. Aby Michael's Fab Academy site had a useful diagram with the LCD pins. The ElectronicWings diagram was even more useful. After some Googling I found that the screen needs 5V signals. The RP2040 is a 3.3V chip, so that's why I'm getting glitchy results. Its strange that it worked with a Pico in the tutorial that I used (click the link in the video description for a connection diagram). I'd like to try the small OLED screen that is in the Fab Lab inventory next. That one is 3-5V tolerant, and much smaller and versatile than the 16x2. And it only needs four pins. Look at that nest of wires above! The OLED will be a great improvement. Here are the files I used, you just open each of them in Thonny and save them to your Pico. The main.py file runs automatically when you power the Pico on. The other two are a library for interfacing with the LCD. Beware, this didn't work for me. 
Download main.py Download lcd_api.py Download gpio_lcd.py","title":"Interfacing with LCD screen"},{"location":"assignments/week04.html#interfacing-with-oled-screen","text":"I used Kevin McAleer's simple code example for the SSD1306 OLED screen. It just writes Test 1 to the screen. It was exhilarating to see the tiny letters light up on the screen on the first try! I then changed the text to something more useful, a prototype display for the Frankenstein MCU , which Fran and my instructor \u00de\u00f3rarinn are working on. The plan is to make a WiFi connected button in every Fab Lab and the staff push the button every day to show that the lab is active. The connections between labs appear on a spinning globe made in Three.js, which runs in the browser. Download oled.py","title":"Interfacing with OLED screen"},{"location":"assignments/week04.html#stepper-control-with-rp2040","text":"This one I also did with the Raspberry Pi Pico. from machine import Pin from time import sleep IN1 = Pin ( 2 , Pin . OUT ) IN2 = Pin ( 3 , Pin . OUT ) IN3 = Pin ( 4 , Pin . OUT ) IN4 = Pin ( 5 , Pin . OUT ) pins = [ IN1 , IN2 , IN3 , IN4 ] sequence = [[ 1 , 0 , 0 , 0 ],[ 0 , 1 , 0 , 0 ],[ 0 , 0 , 1 , 0 ],[ 0 , 0 , 0 , 1 ]] while True : for step in sequence : for i in range ( len ( pins )): pins [ i ] . value ( step [ i ]) sleep ( 0.01 ) Controlling the stepper is surprisingly simple. It has four phases, so you connect it to four pins on the Pico. Then you just alternate which pin is HIGH while the others are low. The stepper motor has 64 steps per rotation, but it also has a 1/64 gearing ratio, so in total it has 64 * 64 = 4096 steps per rotation. That's pretty good for a 3 dollar stepper! I bought a set of five 28-BYJ-48 steppers with ULN2003 drivers for $15. They're cheap and precise! But there are two drawbacks. They're quite weak. For a motor with 1:64 gearing, it's surprisingly easy to make it skip steps. Also, the gearbox introduces backlash which is much bigger than the step size. 
The step size is 0.09\u00b0 but the backlash seems to be a few degrees. Maybe it's possible to correct for the slop in software every time the motor changes direction. But that won't work 100% and definitely not with small motions. I wonder if these motors are a good fit for my robot arm. In the video above I changed the sleep value and then ran the program. First it was 0.1 s, then 0.01 s and finally 0.001 s. When I went below 0.01 s, the stepper stalled. It's fun to have status LEDs on all the phases. At slow speeds you can see how the phases are turned on and off. I want to have status LEDS on every output pin on every microcontroller board! It's a really handy debugging tool. I laser cut a press-fit acrylic arrow to see the motor's movement better. Since I have the 12V version of the 28BYJ-48 motors, I can run them on a 9V battery. So here's my first foray into battery-powered electronics. The Pico is still powered by a USB cable, though. I need to learn how to make a 5V regulator board, so that I can also power the Pico using the 9V battery.","title":"Stepper control with RP2040"},{"location":"assignments/week04.html#pio-stepper-control","text":"I followed a tutorial and wrote a program that controls a stepper with PIO, without using the processor at all. 
I then modified it to include four PIO state machines that run the same program but are clocked at different frequencies: Here's the code: from machine import Pin from rp2 import PIO, StateMachine, asm_pio from time import sleep import sys @asm_pio(set_init=(PIO.OUT_LOW,) * 4) def prog(): wrap_target() set(pins, 8) [31] #8 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 4) [31] #4 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 2) [31] #2 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 1) [31] #1 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] wrap() sm0 = StateMachine(0, prog, freq=100000, set_base=Pin(2)) sm1 = StateMachine(1, prog, freq=50000, set_base=Pin(6)) sm2 = StateMachine(2, prog, freq=25000, set_base=Pin(10)) sm3 = StateMachine(3, prog, freq=12500, set_base=Pin(18)) sm0.active(1) sm1.active(1) sm2.active(1) sm3.active(1) sleep(5) sm0.active(0) sm1.active(0) sm2.active(0) sm3.active(0) sm0.exec(\"set(pins,0)\") sm1.exec(\"set(pins,0)\") sm2.exec(\"set(pins,0)\") sm3.exec(\"set(pins,0)\") nop() is no operation, and you can optionally add a delay after every command, like this: [1] . That was a delay of one clock cycle. I think 31 cycles is the maximum.","title":"PIO stepper control"},{"location":"assignments/week04.html#adding-a-reset-button","text":"The Raspberry Pi Pico has no reset button, but it resets if you connect the RUN pin to ground. So I just connected a jumper to the RUN pin on the breadboard and make the other end of the jumper touch the GND pin that is one pin over to reset the Pico. That's more convenient than unplugging the USB cable and plugging it in again.","title":"Adding a Reset button"},{"location":"assignments/week04.html#overclocking-the-rp2040","text":"I tried Chris DeHut's RP2040 overclocking video . He has lots of good stuff about the Pico. 
Here's his program which changes the clock speed a few times and measures the time it takes to do 100.000 sets of the calculations in the Do_Stuff function: ''' PICO default clock speed is 125 MHz Demo to show time to make a bunch of basic math calculations at varaious clock speeds that the PICO can handle ''' import machine import time import machine led_onboard = machine . Pin ( 25 , machine . Pin . OUT ) def Do_Stuff (): st = time . ticks_ms () Y = 0 while Y < 100000 : Y += 1 Z = 57 Z1 = Z + Y Z2 = Z - Y Z3 = Z * ( Z + Y ) #print(Y, Z1, Z2, Z3) led_onboard . value ( 0 ) #print(Y) et = time . ticks_ms () #print(et, st, et-st) return et - st cntr = 0 while cntr < 2 : #run whole test several times for observation cntr += 1 machine . freq ( 125000000 ) #set clock to 125 MHz x = machine . freq () t = Do_Stuff () print ( \" \\n @\" , x , \" time to run =\" , t , \"ms\" ) machine . freq ( 140000000 ) #set clock to 140 MHz x = machine . freq () t = Do_Stuff () print ( \" \\n @ \" , x , \" time to run =\" , t , \"ms\" ) machine . freq ( 200000000 ) #set clock to 200 MHz x = machine . freq () t = Do_Stuff () print ( \" \\n @ \" , x , \" time to run =\" , t , \"ms\" ) machine . freq ( 250000000 ) #set clock to 250 MHz x = machine . freq () t = Do_Stuff () print ( \" \\n @ \" , x , \" time to run =\" , t , \"ms\" ) machine . freq ( 125000000 ) #set clock to 125 MHz to clean things up print ( \" \\n All Done Folks!\" ) The loop runs a few times so that you can take the average of the time measurements. This is one set of measurements: @ 125000000 time to run = 1167 ms @ 140000000 time to run = 1042 ms @ 200000000 time to run = 729 ms @ 250000000 time to run = 583 ms That's a big difference by varying the clock speed, and the RP2040 datasheet says that overclocking is usually safe. I will keep this in mind when my programs are running slowly. Then I tried going a bit higher. It worked at 280 MHz, but at 290 or 300 MHz, I got no response from the microcontroller. 
So this is the best time: @ 280000000 time to run = 521 ms I think I will not go above 250 MHz. That's a lot, twice as fast as the standard RP2040 with a 12 MHz crystal, like in the Pico and the Xiao RP2040. There are instructions online on how to go up to 420 MHz and there is at least one commercial board that runs the RP2040 at 400 MHz.","title":"Overclocking the RP2040"},{"location":"assignments/week04.html#custom-performance-test","text":"I modified the PIO program to control four steppers at different speeds, by modifying the clock dividers in each of the state machines. The state machines all run the same program. It works, and there should be no load on the main processor. The video looks the same as the four stepper video above. Now let's verify that there is no load on the main processor. I'll take Chris DeHut's 100 thousand calculation routine and put it into the PIO stepper program. Here are the results while driving four steppers at different speeds at the same time: @ 125000000 time to run = 1167 ms @ 140000000 time to run = 1042 ms @ 200000000 time to run = 729 ms @ 250000000 time to run = 584 ms Those are the same times as the first case, which had no steppers. Wow! I then increased the number of calculation loops to 300 thousand, so that we can see what happens to the steppers as we increase the main clock frequency from 125 MHz to 140, 200 and finally 250 MHz. As you can see from the video below, the steppers speed up until the fastest stepper stalls when the clock speed goes up to 250 MHz. For comparison with the PIO routine, I also tried to mix the 100k calculation code with code where the processor controls four steppers at the same time, but i couldn't get those two things to happen at the same time. But I could probably run those things on core0 and core1 with good results. 
Let's try, using this tutorial to learn how threads work in Python: @ 125000000 time to run = 1181 ms @ 140000000 time to run = 1053 ms @ 200000000 time to run = 734 ms @ 250000000 time to run = 587 ms It worked! My first dual-core program! The steppers just kept on running on core1 after the calculations finished on core0. And the calculation times are good! They're just a few milliseconds longer the 4 stepper PIO + 100k calculation routine. Here's the code: import machine import time import machine from machine import Pin from rp2 import PIO, StateMachine, asm_pio from time import sleep import sys led_onboard = machine.Pin(25, machine.Pin.OUT) @asm_pio(set_init=(PIO.OUT_LOW,) * 4) def prog(): wrap_target() set(pins, 8) [31] #8 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 4) [31] #4 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 2) [31] #2 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 1) [31] #1 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] wrap() sm0 = StateMachine(0, prog, freq=50000, set_base=Pin(2)) sm1 = StateMachine(1, prog, freq=25000, set_base=Pin(6)) sm2 = StateMachine(2, prog, freq=12500, set_base=Pin(10)) sm3 = StateMachine(3, prog, freq=6250, set_base=Pin(18)) sm0.active(1) sm1.active(1) sm2.active(1) sm3.active(1) #sleep(5) def Do_Stuff(): st = time.ticks_ms() Y = 0 while Y < 300000:+\u00f0\u00f0\u00f0\u00f0 Y += 1 Z = 57 Z1 = Z + Y Z2 = Z - Y Z3 = Z * (Z + Y) #print(Y, Z1, Z2, Z3) led_onboard.value(0) #print(Y) et = time.ticks_ms() #print(et, st, et-st) return et-st cntr = 0 while cntr < 2: #run whole test several times for observation cntr += 1 machine.freq(125000000) #set clock to 125 MHz x = machine.freq() t = Do_Stuff() print(\"\\n@\", x, \" time to run =\", t, \"ms\") machine.freq(140000000) #set clock to 140 MHz x = machine.freq() t = Do_Stuff() print(\"\\n@ \", x, \" time to run =\", t, \"ms\") machine.freq(200000000) #set clock to 200 
MHz x = machine.freq() t = Do_Stuff() print(\"\\n@ \", x, \" time to run =\", t, \"ms\") machine.freq(250000000) #set clock to 250 MHz x = machine.freq() t = Do_Stuff() print(\"\\n@ \", x, \" time to run =\", t, \"ms\") machine.freq(125000000) #set clock to 125 MHz to clean things up print(\"\\n All Done Folks!\") sm0.active(0) sm1.active(0) sm2.active(0) sm3.active(0) sm0.exec(\"set(pins,0)\") sm1.exec(\"set(pins,0)\") sm2.exec(\"set(pins,0)\") sm3.exec(\"set(pins,0)\")","title":"Custom performance test"},{"location":"assignments/week04.html#xiao-samd21","text":"The SAMD21 runs at 48 MHz, which is considerably slower than the RP2040. But it's easy to solder. I'll probably use it because of that. It also has a DAC output. For hobbyists, the SAMD21 can only be programmed in the Arduino IDE and CircuitPython. For professionals, you can use Microchip Studio or set up your own toolchain. So I set up the board in the Arduino IDE the way you would set up any new board: I added the proper link into \"Additional Boards Manager URLs\" in Preferences, and then I could find Seeed SAMD21 Boards in the Boards Manager and install them. It's as easy as that, and then selecting the right board (Seeeduino Xiao). The setup is covered in this tutorial . I tried connecting it to the Arduino IDE but it didn't work.","title":"Xiao SAMD21"},{"location":"assignments/week04.html#xiao-esp32-c3","text":"The ESP32 C3 runs at 160 MHz, which is fast, but it's not possible to overclock it. But the most amazing thing is that this tiny microcontroller has WiFi and Bluetooth built in! This is my first RISC-V chip. As Neil says, RISC-V will probably take over the world. It's an open source architecture, so unlike the ARM chips that are found in microcontrollers, billions of smartphones and all the way up to supercomputers, manufacturers don't have to pay huge licensing fees to make RISC-V chips. 
I tried connecting it to the Arduino IDE but it didn't work.","title":"Xiao ESP32 C3"},{"location":"assignments/week04.html#esp-32-cam","text":"ESP-32 CAM is a 10 dollar WiFi camera! I had to have one and try it out. It might play well with my robot arm. I followed this tutorial to set up the ESP32-CAM board. To install the ESP32 boards in the Arduino IDE, I followed that tutorial . Then I tested the board with this tutorial . I uncommented the AI-Thinker CAM definition in the setup and input my WiFi credentials in the sketch. When the Arduino IDE had uploaded the code, the following message was left hanging: Leaving... Hard resetting via RTS pin... But then I checked and saw that above it, the status bar said \"Done uploading\". I also found this explanation , which said that this isn't an error at all. First I saw nothing in the serial monitor. Then I removed an extra #define camera line. Still nothing. Then I switched to the 5G network, the same as my laptop is on. Then I saw a series of dots form .... but no IP address. But when I did in the opposite order from the tutorial (first pressed RESET, then removed the jumper) I got this message: ets Jul 29 2019 12:21:46 rst:0x1 (POWERON_RESET),boot:0x3 (DOWNLOAD_BOOT(UART0/UART1/SDIO_REI_REO_V2)) waiting for download ets Jul 29 2019 12:21:46 Something's happening! I removed the jumper and pressed reset again: rst:0x1 (POWERON_RESET),boot:0x13 (SPI_FAST_FLASH_BOOT) configsip: 0, SPIWP:0xee clk_drv:0x00,q_drv:0x00,d_drv:0x00,cs0_drv:0x00,hd_drv:0x00,wp_drv:0x00 mode:DIO, clock div:1 load:0x3fff0030,len:1344 load:0x40078000,len:13836 load:0x40080400,len:3608 entry 0x400805f0 . WiFi connected Camera Ready! Use 'http://192.168.1.32' to connect Success! After scrolling down and pressing \"Start stream\", I could see a live stream from the camera over WiFi to my browser! At QVGA resolution (320x240), the stream is smooth. At UXGA resolution (1600x1200), the stream stutters. 
SVGA (800x600) is the highest resolution where lag isn't very noticeable. That's pretty good for a microcontroller! I love it. And I find that in low light, greyscale looks a lot better than a color image. Note This time I used ffmpeg -i esp32-cam_test1.mp4 -vf scale=400:-2 -vcodec libx264 -an -crf 20 esp32-cam_test1_web.mp4 to compress the video and remove the audio as shown here . This video shows ways to control the image capture just by typing URLs into the browser. If you have a computer nearby at all times, you can also stream video from the ESP32-CAM to any browser in the world. And here's a guide to modifying the HTML code inside the Arduino code. It's not obvious how to do it, since the HTML has been converted to hex code in the Arduino sketch. I checked whether it's possible to overclock the ESP32, but 240 MHz seems to be the maximum clock speed.","title":"ESP-32 CAM"},{"location":"assignments/week04.html#fpga","text":"I bought an FPGA development board from Seeed Studio. It is the Runber board , which contains a Gowin FPGA. I took the plunge because it is the cheapest FPGA board I've found that still has a set of tutorials . I looked at some of the documentation and it looks intimidating. I applied for a software license on a Friday and got it in the beginning of the next week. I haven't had the time to set it up and go through a tutorial, though. .md-content__button { display: none; }","title":"FPGA"},{"location":"assignments/week05.html","text":"3D Scanning and Printing Benchmarks I made a few test prints to characterize the Ultimaker 2+ and Ultimaker 3. These are our two workhorses. The classic 3D printing benchmark is the 3D Benchy by Creative Tools in Sweden. It came out well, with only a tiny bit of drooping in the top of the doorway. A nice 3D Benchy. I printed this tolerance test by 3D Maker Noob. The clearance test turned out well, with only the tightest clearance (0.1 mm) getting fused together. I would say that's very good. 
I've 3D printed professionally for years, but I'm still amazed how precise these computer-controlled hot glue guns can be. A hot glue gun is actually the origin of FDM 3D printing , by the way. 3D scanning I couldn't think of anything to 3D scan but just at the right time, an artist came into the lab with a small sculpture and asked if we could 3D scan and print it. I gave it my best shot. The sculpture to be scanned. The creature is based on a picture of the old Icelandic mythical beast Fj\u00f6rulalli, which looks mostly like a seal and lives on the beach. It's most commonly found in the West Fjords and Brei\u00f0afj\u00f6r\u00f0ur. It looked like a seal and acted like a seal. The Fj\u00f6rulalli is probably our least mysterious mythical creature. But I really like Angela Muenther's sculpture of it. I followed Christopher Barnatt's excellent tutorial in this project. We started by clearing a table and closing the blinds, because you want nice, even light for photogrammetry. We placed the little guy on some fabric with a pattern, because that helps the algorithm locate the pictures in space afterwards and stitch them together. Then we took pictures on a small point-and-shoot camera all around the sculpture in circles with increasing height. Then I loaded the images into the excellent Meshroom , which is free and open-source. I had tried Meshroom once before, but it didn't work for some reason, so I gave up. This time there was time pressure, so I became more resourceful. I found that the path to the images contained Icelandic characters. Once I had fixed that, the program started running, the computer started humming and in about an hour and a half, I had a 3D point cloud! Raw point cloud in Meshroom. Then I took the mesh into Meshmixer and deleted everything that I didn't want to include. 
The final 3D processing step was in Blender, where I filled in the bottom of the model to make the mesh watertight and then smoothed it in a few places, using my new Blender skills from having gone through the donut tutorial last week. Loop select and fill in Blender. The smoothing tool in the sculpture environment in Blender. Then I 3D printed two sculptures in almost exactly the same gray color as the original sculpture. One had smoothing, the other had none. When Angela came to examine them the next day, she said she wanted something in between, so I started again and did the smoothing with more finesse this time. Then I printed some more of them, in a few different materials. One in transparent PETG and one in glow-in-the-dark PLA. The output of this week. I used this tip to get the printing temperature right for PETG, for the transparent sculpture print. PETG print with Lightning infill in Cura. Angela really liked the glow-in-the-dark version. So do I. Angela, far left, describing her exhibition. The exhibition at Galler\u00ed \u00dathverfa . Design for 3D printing The next task was to design and 3D print something that you can't make subtractively (e.g. using milling). I've been interested in lattice structures for a while, so I took a stab at making one. The 3D sketch in Fusion 360. I sweep the circle along the line to make a cylinder. Then I repeat. I made a rectangular pattern of this lattice and 3D printed it. The red one is hard PLA and the white one is soft TPU. Download lattice test file I really liked the soft lattice, so I wanted to make a bigger version of it. I made a much bigger rectangular pattern in Fusion 360. Then I modeled a ball and used it to cut the pattern. Fortunately my laptop is quite powerful, so it went without a hitch. This is probably the biggest pattern I've made. Then I thought about how to print this thing. It needs support on the bottom side, but will regular supports generated by Cura be good enough? 
I decided to model the supports myself. To decide on the gap between the model and support structure, I used this reference . I decided to go for a 0.2 mm air gap, so I offset the circle by 0.4 mm. Support bowl for the lattice structure. Removing the supports went really smoothly! After cutting with scissors, I could tear the ball away from the bowl. \u00de\u00f3rarinn immediately suggested that I turn the ball into a lamp. I used this guide from Creality to adjust the Generic TPU profile in Cura for the CR-10 printer. The CR-series printers are OK for tinkerers but not for Fab Labs or other professional users. I would not let anyone use the CR-10 printers unsupervised. You have to level the bed almost every time you print, and often make adjustments after you start printing. Usually I need to start printing two or three times. It's not an enjoyable experience. Contrast that with the Ultimaker 2+ (I have one myself and it's my favorite 3D printer). It just prints, and you rarely have to level it. It's a real workhorse. I've printed for over 2000 hours on my machine and it's still printing like it's new. I don't like the direction Ultimaker is taking, however, with super expensive printers for professional users. I printed the ball using TPU (thermoplastic polyurethane) and it bounces and works great as a ball to play with! I used 1.75 mm TPU on the Creality CR-10, which has a Bowden extruder. What that means is that it pushes on the filament and it acts like a long, soft spring. So to get good results I had to print really slowly. The print took five days to complete. The lattice ball has become one of the favorite objects to play with in the lab. You can even bounce it off the floor without breaking it. Here are my students in the Fab Lab course at the local high school: Unfortunately the Fusion 360 file is 65 MB, so I can't include it here. But I've put the STL file on Sketchfab . .md-content__button { display: none; }","title":"5. 
3D Scanning and Printing"},{"location":"assignments/week05.html#3d-scanning-and-printing","text":"","title":"3D Scanning and Printing   "},{"location":"assignments/week05.html#benchmarks","text":"I made a few test prints to characterize the Ultimaker 2+ and Ultimaker 3. These are our two workhorses. The classic 3D printing benchmark is the 3D Benchy by Creative Tools in Sweden. It came out well, with only a tiny bit of drooping in the top of the doorway. A nice 3D Benchy. I printed this tolerance test by 3D Maker Noob. The clearance test turned out well, with only the tightest clearance (0.1 mm) getting fused together. I would say that's very good. I've 3D printed professionally for years, but I'm still amazed how precise these computer-controlled hot glue guns can be. A hot glue gun is actually the origin of FDM 3D printing , by the way.","title":"Benchmarks"},{"location":"assignments/week05.html#3d-scanning","text":"I couldn't think of anything to 3D scan but just at the right time, an artist came into the lab with a small sculpture and asked if we could 3D scan and print it. I gave it my best shot. The sculpture to be scanned. The creature is based on a picture of the old Icelandic mythical beast Fj\u00f6rulalli, which looks mostly like a seal and lives on the beach. It's most commonly found in the West Fjords and Brei\u00f0afj\u00f6r\u00f0ur. It looked like a seal and acted like a seal. The Fj\u00f6rulalli is probably our least mysterious mythical creature. But I really like Angela Muenther's sculpture of it. I followed Christopher Barnatt's excellent tutorial in this project. We started by clearing a table and closing the blinds, because you want nice, even light for photogrammetry. We placed the little guy on some fabric with a pattern, because that helps the algorithm locate the pictures in space afterwards and stitch them together. Then we took pictures on a small point-and-shoot camera all around the sculpture in circles with increasing height. 
Then I loaded the images into the excellent Meshroom , which is free and open-source. I had tried Meshroom once before, but it didn't work for some reason, so I gave up. This time there was time pressure, so I became more resourceful. I found that the path to the images contained Icelandic characters. Once I had fixed that, the program started running, the computer started humming and in about an hour and a half, I had a 3D point cloud! Raw point cloud in Meshroom. Then I took the mesh into Meshmixer and deleted everything that I didn't want to include. The final 3D processing step was in Blender, where I filled in the bottom of the model to make the mesh watertight and then smoothed it in a few places, using my new Blender skills from having gone through the donut tutorial last week. Loop select and fill in Blender. The smoothing tool in the sculpture environment in Blender. Then I 3D printed two sculptures in almost exactly the same gray color as the original sculpture. One had smoothing, the other had none. When Angela came to examine them the next day, she said she wanted something in between, so I started again and did the smoothing with more finesse this time. Then I printed some more of them, in a few different materials. One in transparent PETG and one in glow-in-the-dark PLA. The output of this week. I used this tip to get the printing temperature right for PETG, for the transparent sculpture print. PETG print with Lightning infill in Cura. Angela really liked the glow-in-the-dark version. So do I. Angela, far left, describing her exhibition. The exhibition at Galler\u00ed \u00dathverfa .","title":"3D scanning"},{"location":"assignments/week05.html#design-for-3d-printing","text":"The next task was to design and 3D print something that you can't make subtractively (e.g. using milling). I've been interested in lattice structures for a while, so I took a stab at making one. The 3D sketch in Fusion 360. I sweep the circle along the line to make a cylinder. 
Then I repeat. I made a rectangular pattern of this lattice and 3D printed it. The red one is hard PLA and the white one is soft TPU. Download lattice test file I really liked the soft lattice, so I wanted to make a bigger version of it. I made a much bigger rectangular pattern in Fusion 360. Then I modeled a ball and used it to cut the pattern. Fortunately my laptop is quite powerful, so it went without a hitch. This is probably the biggest pattern I've made. Then I thought about how to print this thing. It needs support on the bottom side, but will regular supports generated by Cura be good enough? I decided to model the supports myself. To decide on the gap between the model and support structure, I used this reference . I decided to go for a 0.2 mm air gap, so I offset the circle by 0.4 mm. Support bowl for the lattice structure. Removing the supports went really smoothly! After cutting with scissors, I could tear the ball away from the bowl. \u00de\u00f3rarinn immediately suggested that I turn the ball into a lamp. I used this guide from Creality to adjust the Generic TPU profile in Cura for the CR-10 printer. The CR-series printers are OK for tinkerers but not for Fab Labs or other professional users. I would not let anyone use the CR-10 printers unsupervised. You have to level the bed almost every time you print, and often make adjustments after you start printing. Usually I need to start printing two or three times. It's not an enjoyable experience. Contrast that with the Ultimaker 2+ (I have one myself and it's my favorite 3D printer). It just prints, and you rarely have to level it. It's a real workhorse. I've printed for over 2000 hours on my machine and it's still printing like it's new. I don't like the direction Ultimaker is taking, however, with super expensive printers for professional users. I printed the ball using TPU (thermoplastic polyurethane) and it bounces and works great as a ball to play with! 
I used 1.75 mm TPU on the Creality CR-10, which has a Bowden extruder. What that means is that it pushes on the filament and it acts like a long, soft spring. So to get good results I had to print really slowly. The print took five days to complete. The lattice ball has become one of the favorite objects to play with in the lab. You can even bounce it off the floor without breaking it. Here are my students in the Fab Lab course at the local high school: Unfortunately the Fusion 360 file is 65 MB, so I can't include it here. But I've put the STL file on Sketchfab . .md-content__button { display: none; }","title":"Design for 3D printing"},{"location":"assignments/week06.html","text":"Electronics Design Designing a simple board in KiCAD Here's a video where I describe what I did in Electronics Design week to Neil Gershenfeld during random review. A few months ago I went through a short KiCAD tutorial, to get to know the program a little bit. But I got a lot more out of designing a little PCB with an LED and a button and making it on the vinyl cutter this week. Designing something without making it doesn't get me excited to do more stuff. After making the little circuit, I really want to make more. Let's get into it. The fab library I cloned the Fab Lab parts library for KiCAD from the Gitlab repository. It's maintained by Krisjanis Rijnieks in Finland. To use the library, I needed to upgrade to KiCAD 7. I started by going into Preferences and Manage Symbol Libraries. There I clicked the + symbol to import a new library and navigated to the location of the fab library, which is called fab.kicad_sym. Then I went into Preferences and Manage Footprint Libraries and did the same, but that file is called fab.pretty. KiCAD has many parts in its libraries, but the fab library has been the most useful by far. The schematic Having imported all the components in the Fab Lab Inventory , I could get started with my design. I created a new design in a new folder. 
In the Schematic editor I pressed A on the keyboard to add components, and added a 1206 diode, a 1206 resistor to go with it, the Xiao RP2040 module and a pushbutton. I also added a power symbol and a ground symbol. Then I used the wire tool on the right side of the interface to connect the components together in a way that made sense to me. My first schematic. Routing the PCB Next, I opened the PCB Editor. I placed the components there all at once. They were connected by thin lines called a rat's nest, but the routing of the physical traces requires a second step. I moved the parts into a pleasing arrangement and then connected them together with the wire tool in the toolbar on the right. After selecting Update PCB from Schematic, I got the parts all in a bundle. Then I realized that I probably had the wrong button, so I swapped it out. Then I arranged the parts in a pleasing way and routed traces between them by pressing X on the keyboard. Arranged and routed. 3D Viewer Then I tried the 3D Viewer (View -> 3D Viewer) and got a warning that I needed to define a board outline. Board outline missing. So I added a board outline to Edge Cuts. A day or two earlier, my elder son Ernir said that he wanted to make an electric thing in the shape of a circle, and close it off on all sides except one, so that we could connect it to another circle. So I made the outline a circle. Circular board outline. I moved the components around some more until I was happy. Then I pressed Alt + 3 on the keyboard to get a 3D preview. That was underwhelming, since none of the components I used have a 3D model associated with them. I wanted at least to have the Xiao module, so that I could design an enclosure around its USB connector (if I had the time). I found the model on the SeeedStudio web site . It includes a Fusion 360 design file, but I only need the STEP file. I put the STEP file into the fab.3dshapes folder in the fab library and used this tutorial to connect it to the KiCAD footprint. 
Now we're talking! I can see the 3D model being useful when designing enclosures. A few fixes I exported the design as an SVG and opened it in Inkscape. It wasn't until I saw it there that I realized that the traces were too thin to cut on the vinyl cutter. I needed to go back into the PCB Editor and use a quick and dirty way to change the track width. I selected one part of each trace, then pressed U to select the whole trace and then pressed E to open the trace properties. There I could change the trace width from 0.25 mm to 1 mm. That's better. Next time I'll set the track width from the start in the proper way using Netclasses in File -> Board Setup. That's much more convenient, since every new trace will have the width that I've defined. This is how you set the trace width properly for the whole board. Now I have a completed design in the PCB editor, ready to export to Inkscape: Xiao RP2040 LED board with a button. Here are the KiCAD files for this project, including the PDF file that is ready for cutting: Download KiCAD project Selecting a resistor for the LED Now I needed to find the right resistor to use with the blue LED. I found a very useful tutorial from Digikey, which is where I got my parts from. Diodes are a one-way valve for electrons. Electrons can go one way, but not the other. When diodes let current through, they have almost no resistance, and that's great. Then someone discovered that diodes can emit light. Now LEDs are everywhere and they have one caveat: You can't just connect one to a power source and have it work. Because it's such a good conductor, it will let more current through than it can handle. It will burn up, or even blow up! So we need a helper component. Every LED needs a current limiting resistor in order to survive. So, how do we select the right resistor for our blue LED? Let's use Ohm's Law: Ohm's Law pyramid. Coincidentally, VIR means 'wire' in Icelandic. 
It's fun to be able to make graphics like these in Inkscape and render them easily on a web page. Ok, I need to know the Source voltage: 3.3 V LED forward voltage: 3.2 V (max 3.5 V) LED's recommended operating current: 30 mA But there's a catch: After reading the datasheet, I know that RP2040 can only supply 16 mA from each GPIO pin. So that's the number I'll use. The voltage drop across the resistor needs to be \\[V_{source}-V_{forward}\\] \\[3.3 V - 3.2 V = 0.1 V\\] Now let's use the pyramid to get an expression for the resistor: \\[R = \\frac{V}{I} = \\frac{0.1V}{0.016A} = 6.25 \u03a9\\] I have the resistance! But there's one more step: I need to make sure that the resistor can take the heat. I'll use the power formula to see the power dissipated by the resistor: \\[P = I\u22c5V\\] \\[ = 0.016 A \u22c5 0.1 V \\] \\[ = 0.0016 W \\] \\[ = 1.6 mW \\] That's insignificant. At the Fab Lab I have 4.99\u03a9 and 10\u03a9 resistors. Let's check the current through the LED: 4.99\u03a9: \\[I = \\frac{V}{R} = \\frac{0.1V}{4.99\u03a9} = 0.02A \\] 10\u03a9: \\[I = \\frac{V}{R} = \\frac{0.1V}{10\u03a9} = 0.01A\\] OK, I'll use the 10\u03a9 resistor. Using an oscilloscope I programmed a Raspberry Pi Pico to output stepper motor control signals. This is the same MicroPython code as I used in Embedded Programming week. It's a very simple way of creating signals for a 28BYJ-48 stepper motor . from machine import Pin from time import sleep IN1 = Pin ( 2 , Pin . OUT ) IN2 = Pin ( 3 , Pin . OUT ) IN3 = Pin ( 4 , Pin . OUT ) IN4 = Pin ( 5 , Pin . OUT ) pins = [ IN1 , IN2 , IN3 , IN4 ] sequence = [[ 1 , 0 , 0 , 0 ],[ 0 , 1 , 0 , 0 ],[ 0 , 0 , 1 , 0 ],[ 0 , 0 , 0 , 1 ]] while True : for step in sequence : for i in range ( len ( pins )): pins [ i ] . value ( step [ i ]) sleep ( 1 ) I set the probe to 10X sensitivity: And here's what I got on the oscilloscope: A clear step signal, at just over 60Hz. Time is on the X-axis and voltage is on the Y-axis. 
Then when the oscilloscope starts receiving a signal I press Auto adjust, and the scope detects how the signal looks and adjusts the time scale so the signal is steady on the screen. I used the portable digital oscilloscope to troubleshoot my Output Devices board: I found erratic signals coming from one of the H-bridges, which caused my brushless motor to move erratically. More info is here . Using a multimeter In Computer-Controlled Cutting week, I measured a resistor with a multimeter: Here the circuit needs to be powered off. The resistor says 1002, and it indeed measures as 10kOhm. In Output Devices week, I measured the voltage that an OLED gets from the power supply: Here the circuit needs to be powered on. The OLED gets roughly 5V, as it should be. The OLED can operate on 3.3V-5V voltage. I also measured the current that the OLED draws: Here I need to break the circuit and put the current meter into the circuit, so that the current flows through it. Inside the current meter is a resistor with a very small, known resistance. The voltage drop over the resistor is measured and from that, the device calculates the current. On the right a bigger part of the screen is turned on, and it shows in the current measurement. .md-content__button { display: none; }","title":"6. Electronics Design"},{"location":"assignments/week06.html#electronics-design","text":"","title":"Electronics Design"},{"location":"assignments/week06.html#designing-a-simple-board-in-kicad","text":"Here's a video where I describe what I did in Electronics Design week to Neil Gershenfeld during random review. A few months ago I went through a short KiCAD tutorial, to get to know the program a little bit. But I got a lot more out of designing a little PCB with an LED and a button and making it on the vinyl cutter this week. Designing something without making it doesn't get me excited to do more stuff. After making the little circuit, I really want to make more. 
Let's get into it.","title":"Designing a simple board in KiCAD   "},{"location":"assignments/week06.html#the-fab-library","text":"I cloned the Fab Lab parts library for KiCAD from the Gitlab repository. It's maintained by Krisjanis Rijnieks in Finland. To use the library, I needed to upgrade to KiCAD 7. I started by going into Preferences and Manage Symbol Libraries. There I clicked the + symbol to import a new library and navigated to the location of the fab library, which is called fab.kicad_sym. Then I went into Preferences and Manage Footprint Libraries and did the same, but that file is called fab.pretty. KiCAD has many parts in its libraries, but the fab library has been the most useful by far.","title":"The fab library"},{"location":"assignments/week06.html#the-schematic","text":"Having imported all the components in the Fab Lab Inventory , I could get started with my design. I created a new design in a new folder. In the Schematic editor I pressed A on the keyboard to add components, and added a 1206 diode, a 1206 resistor to go with it, the Xiao RP2040 module and a pushbutton. I also added a power symbol and a ground symbol. Then I used the wire tool on the right side of the interface to connect the components together in a way that made sense to me. My first schematic.","title":"The schematic"},{"location":"assignments/week06.html#routing-the-pcb","text":"Next, I opened the PCB Editor. I placed the components there all at once. They were connected by thin lines called a rat's nest, but the routing of the physical traces requires a second step. I moved the parts into a pleasing arrangement and then connected them together with the wire tool in the toolbar on the right. After selecting Update PCB from Schematic, I got the parts all in a bundle. Then I realized that I probably had the wrong button, so I swapped it out. Then I arranged the parts in a pleasing way and routed traces between them by pressing X on the keyboard. 
Arranged and routed.","title":"Routing the PCB"},{"location":"assignments/week06.html#3d-viewer","text":"Then I tried the 3D Viewer (View -> 3D Viewer) and got a warning that I needed to define a board outline. Board outline missing. So I added a board outline to Edge Cuts. A day or two earlier, my elder son Ernir said that he wanted to make an electric thing in the shape of a circle, and close it off on all sides except one, so that we could connect it to another circle. So I made the outline a circle. Circular board outline. I moved the components around some more until I was happy. Then I pressed Alt + 3 on the keyboard to get a 3D preview. That was underwhelming, since none of the components I used have a 3D model associated with them. I wanted at least to have the Xiao module, so that I could design an enclosure around its USB connector (if I had the time). I found the model on the SeeedStudio web site . It includes a Fusion 360 design file, but I only need the STEP file. I put the STEP file into the fab.3dshapes folder in the fab library and used this tutorial to connect it to the KiCAD footprint. Now we're talking! I can see the 3D model being useful when designing enclosures.","title":"3D Viewer"},{"location":"assignments/week06.html#a-few-fixes","text":"I exported the design as an SVG and opened it in Inkscape. It wasn't until I saw it there that I realized that the traces were too thin to cut on the vinyl cutter. I needed to go back into the PCB Editor and use a quick and dirty way to change the track width. I selected one part of each trace, then pressed U to select the whole trace and then pressed E to open the trace properties. There I could change the trace width from 0.25 mm to 1 mm. That's better. Next time I'll set the track width from the start in the proper way using Netclasses in File -> Board Setup. That's much more convenient, since every new trace will have the width that I've defined. 
This is how you set the trace width properly for the whole board. Now I have a completed design in the PCB editor, ready to export to Inkscape: Xiao RP2040 LED board with a button. Here are the KiCAD files for this project, including the PDF file that is ready for cutting: Download KiCAD project","title":"A few fixes"},{"location":"assignments/week06.html#selecting-a-resistor-for-the-led","text":"Now I needed to find the right resistor to use with the blue LED. I found a very useful tutorial from Digikey, which is where I got my parts from. Diodes are a one-way valve for electrons. Electrons can go one way, but not the other. When diodes let current through, they have almost no resistance, and that's great. Then someone discovered that diodes can emit light. Now LEDs are everywhere and they have one caveat: You can't just connect one to a power source and have it work. Because it's such a good conductor, it will let more current through than it can handle. It will burn up, or even blow up! So we need a helper component. Every LED needs a current limiting resistor in order to survive. So, how do we select the right resistor for our blue LED? Let's use Ohm's Law: Ohm's Law pyramid. Coincidentally, VIR means 'wire' in Icelandic. It's fun to be able to make graphics like these in Inkscape and render them easily on a web page. Ok, I need to know the Source voltage: 3.3 V LED forward voltage: 3.2 V (max 3.5 V) LED's recommended operating current: 30 mA But there's a catch: After reading the datasheet, I know that RP2040 can only supply 16 mA from each GPIO pin. So that's the number I'll use. The voltage drop across the resistor needs to be \\[V_{source}-V_{forward}\\] \\[3.3 V - 3.2 V = 0.1 V\\] Now let's use the pyramid to get an expression for the resistor: \\[R = \\frac{V}{I} = \\frac{0.1V}{0.016A} = 6.25 \u03a9\\] I have the resistance! But there's one more step: I need to make sure that the resistor can take the heat. 
I'll use the power formula to see the power dissipated by the resistor: \\[P = I\u22c5V\\] \\[ = 0.016 A \u22c5 0.1 V \\] \\[ = 0.0016 W \\] \\[ = 1.6 mW \\] That's insignificant. At the Fab Lab I have 4.99\u03a9 and 10\u03a9 resistors. Let's check the current through the LED: 4.99\u03a9: \\[I = \\frac{V}{R} = \\frac{0.1V}{4.99\u03a9} = 0.02A \\] 10\u03a9: \\[I = \\frac{V}{R} = \\frac{0.1V}{10\u03a9} = 0.01A\\] OK, I'll use the 10\u03a9 resistor.","title":"Selecting a resistor for the  LED"},{"location":"assignments/week06.html#using-an-oscilloscope","text":"I programmed a Raspberry Pi Pico to output stepper motor control signals. This is the same MicroPython code as I used in Embedded Programming week. It's a very simple way of creating signals for a 28BYJ-48 stepper motor . from machine import Pin from time import sleep IN1 = Pin ( 2 , Pin . OUT ) IN2 = Pin ( 3 , Pin . OUT ) IN3 = Pin ( 4 , Pin . OUT ) IN4 = Pin ( 5 , Pin . OUT ) pins = [ IN1 , IN2 , IN3 , IN4 ] sequence = [[ 1 , 0 , 0 , 0 ],[ 0 , 1 , 0 , 0 ],[ 0 , 0 , 1 , 0 ],[ 0 , 0 , 0 , 1 ]] while True : for step in sequence : for i in range ( len ( pins )): pins [ i ] . value ( step [ i ]) sleep ( 1 ) I set the probe to 10X sensitivity: And here's what I got on the oscilloscope: A clear step signal, at just over 60Hz. Time is on the X-axis and voltage is on the Y-axis. Then when the oscilloscope starts receiving a signal I press Auto adjust, and the scope detects how the signal looks and adjusts the time scale so the signal is steady on the screen. I used the portable digital oscilloscope to troubleshoot my Output Devices board: I found erratic signals coming from one of the H-bridges, which caused my brushless motor to move erratically. More info is here .","title":"Using an oscilloscope"},{"location":"assignments/week06.html#using-a-multimeter","text":"In Computer-Controlled Cutting week, I measured a resistor with a multimeter: Here the circuit needs to be powered off. 
The resistor says 1002, and it indeed measures as 10kOhm. In Output Devices week, I measured the voltage that an OLED gets from the power supply: Here the circuit needs to be powered on. The OLED gets roughly 5V, as it shuould be. The OLED can operate on 3.3V-5V voltage. I also measured the current that the OLED draws: Here I need to break the circuit and put the current meter into the circuit, so that the current flows through it. Inside the current meter is a resistor with a very small, known resistance. The voltage drop over the resistor is measured and from that, the device calculates the current. On the right a bigger part of the screen is turned on, and it shows in the current measurement. .md-content__button { display: none; }","title":"Using a multimeter"},{"location":"assignments/week07.html","text":"Computer-Controlled Machining Design I designed an extended table with a shelf for our coffee corner. I wondered how I could make the shelf supports look good: Version 1 Version 2 Version 3 I ended up picking version 3. Here's an example of how convenient parametric design can be: And here's the whole design process. First I create the parts and adjust their sizes, then I model the press-fit dogbone joints and finally I move the parts into one plane and nest them manually to fit onto the plywood plate that I have. I need to create the plywood plate as a part in the model, so that I can use it as the stock when setting up the machining toolpaths. Computer-Aided Machining I start by creating a Setup. Under the Stock tab, I select the body that represents my plywood sheet: It's good practice to name the bodies in the model. It makes things easier. Then in the Setup tab, I set up the work coordinate system. I set the zero point to be the bottom left corner of the sheet corner, looking at it from above (see the image above). The top surface is the Z reference. Close-up of how I define the work origin for the Shopbot. Now I start creating machining operations. 
I had to create the chamfering tool in Fusion, it's not complicated. Cutter tab Cutting data tab You can select the type of milling bit from a drop down list and then specify its dimensions according to the bit that you have. In the Cutting data tab you also input the feeds and speeds for this bit. \u00de\u00f3rarinn recommended that for plywood I would choose a spindle speed of 14.000 rpm and a feed rate of 1000 mm/min. Then I chose a 2D contour milling operation and selected only the edges that I wanted chamfered. For some reason it worked better to split the operations up and select only one edge in each one. Under the Passes tab, uncheck the Stock to leave option. The first pass is the finishing pass. Geometry tab Heights tab Remember to select Stock bottom as the bottom plane. Next, I mill the dogbone press-fit holes. I select a 6 mm flat end mill with the same feeds and speeds as the chamfering tool. I created one 2D pocket operation for each dogbone pocket. The simulation looks good. I had to enlarge the circles in the dogbone to make sure that the milling bit can enter them. The last operation is to mill all the outline, using the same 6 mm flat endmill, again rotating at 14.000 rpm and moving at 1000 mm/min through the material: The outline milling operation. It's a 2D contour operation, like the chamfering. I had the machine leave rectangular shaped tabs, to keep the parts from moving around while finishing the milling operation. The ShopBot Turning the machine on To be able to turn on the spindle, you need to reset the machine controller. Before you do anything in the Shopbot control software, you need to open Tools -> Spindle control. Otherwise the software can't change the spindle speed as the GCode requires. Then you go into Cuts -> Spindle Warmup Routine, to spin the spindle at two speeds for 10 or 15 minutes, to warm up the bearings in the spindle. You go into Cuts -> Zero Z-axis w/ Zzero plate to zero the Z-axis with a conductive metal plate. 
Then you press K to get the KeyPad window and use the arrow keys on the keyboard to move the spindle to a convenient point before you go into Zero -> Zero [2] axes (X & Y) in the software. Turning on the shop vac just before starting the job. Recalibrating the ShopBot When I made a fit test with two slightly different clearances, I discovered that both of them were way off the mark. I asked my instructor \u00de\u00f3rarinn what might cause this and he suggested that the gearboxes on the stepper motors might be wearing out. This may result in the steps per inch value changing. So I modeled a 100 mm by 100 mm square with a circular pocket in the middle. My instructor \u00de\u00f3rarinn suggested that I also chamfer the corners, so that I could make diagonal measurements. The resulting square had 100.5 mm sides. That means that I need to use this scaling factor on the steps per inch value. I can change that value in the Unit Values window in the Shopbot control software. \\[ \\frac{100}{100.5} = 0.9950249 \\] Corrected value: \\[ 0.9950 * 97.7487 = \\underline{97.262389} \\; \\textnormal{steps/inch} \\] steps/inch. Let's change the unit values for the X and Y axes in the ShopBot control software: The X and Y unit values were 97.7487 and then I changed them to 97.2624. Now our ShopBot is more accurate. Test pieces I designed a test fit piece with different clearances in Fusion 360. These fit tests gave me the confidence to finish the design of the coffee corner and mill all the parts in one go. Hexagon fit test pieces. I also did a chamfering test, here's the CAM simulation: I'm glad I also did this test, because I had selected a chamfering tool that was too small. Both are 90\u00b0 bits, but one is bigger than the other. And that makes a difference when you carve this deep. Milling the parts for the coffee corner It was exciting to do sophisticated milling like this with confidence. 
I fixed the plywood sheet with screws in the corners and on the middle of the long sides. I took care to tie my hair into a bun and wear hearing protection and safety glasses. First I milled just the chamfers into the plywood sheet. Then I changed from the chamfer tool to the 6 mm flat end mill and milled the dogbone pockets. After that operation it still didn't look like anything recognizable. Finally I milled the outlines and then I recognized the parts. Top view after milling. Dislodging the sawdust with a screwdriver. Vaccuuming the sawdust. Breaking the tab. Breaking the tabs with a chisel. Sanding the edges. I chamfered a few corners before hammering the parts together, to make sure that they sit flush against each other. My instructor \u00de\u00f3rarinn suggested that I take a little off the corners to compensate for the inner corner radius in the piece that gets hammered into these holes. Hammering the parts together. They fit tightly together and don't require any glue. Our new documentation station. I really get in the zone when I sit here and write documentation. I think it's because I feel like I'm in a caf\u00e9, and that's where I get the best concentration. Look how serious I am! The next time I mill something out of wood using Fusion 360, I may need to use a different tab shape. Look at the burn marks after the machine stopped to mill this tab: There are burn marks where all the tabs were. While I was hard at work finishing up the documentation, my instructor \u00de\u00f3rarinn painted the table black and attached it and the shelf to the wall. Bas Withagen in Fab Lab Reykjav\u00edk used to say that if you have time to paint your final project, you're doing it wrong. Fortunately, I have my trusty instructor \u00de\u00f3rarinn, who painted the table black. He's there with his dog S\u00f3la, who was old and had become blind from diabetes. She laid at my desk as I worked on the Fab Academy documentation. 
S\u00f3la only lived for a week after I took this picture. Such a sweet dog. Our lovely coffee corner with a freshly painted table and \u00de\u00f3rarinn's espresso machine. \u00de\u00f3rarinn commented that now there was really no spot in the lab that wasn't cozy and enticing to sit down and work on a laptop. Download Coffee Corner Fusion 360 model Download 12 mm fit test Fusion 360 model Download 12 mm fit test - more clearance Fusion 360 model Download 12 mm hexagon test Fusion 360 model Download 15 mm fit test Fusion 360 model Download 15 mm chamfer test Fusion 360 model Download rectangle circle test VCarve file Download rectangle circle test - quarter inch bit VCarve file Download rectangle circle test - 22mm MDF VCarve file .md-content__button { display: none; }","title":"7. Computer-Controlled Machining"},{"location":"assignments/week07.html#computer-controlled-machining","text":"","title":"Computer-Controlled Machining   "},{"location":"assignments/week07.html#design","text":"I designed an extended table with a shelf for our coffee corner. I wondered how I could make the shelf supports look good: Version 1 Version 2 Version 3 I ended up picking version 3. Here's an example of how convenient parametric design can be: And here's the whole design process. First I create the parts and adjust their sizes, then I model the press-fit dogbone joints and finally I move the parts into one plane and nest them manually to fit onto the plywood plate that I have. I need to create the plywood plate as a part in the model, so that I can use it as the stock when setting up the machining toolpaths.","title":"Design"},{"location":"assignments/week07.html#computer-aided-machining","text":"I start by creating a Setup. Under the Stock tab, I select the body that represents my plywood sheet: It's good practice to name the bodies in the model. It makes things easier. Then in the Setup tab, I set up the work coordinate system. 
I set the zero point to be the bottom left corner of the sheet corner, looking at it from above (see the image above). The top surface is the Z reference. Close-up of how I define the work origin for the Shopbot. Now I start creating machining operations. I had to create the chamfering tool in Fusion, it's not complicated. Cutter tab Cutting data tab You can select the type of milling bit from a drop down list and then specify its dimensions according to the bit that you have. In the Cutting data tab you also input the feeds and speeds for this bit. \u00de\u00f3rarinn recommended that for plywood I would choose a spindle speed of 14.000 rpm and a feed rate of 1000 mm/min. Then I chose a 2D contour milling operation and selected only the edges that I wanted chamfered. For some reason it worked better to split the operations up and select only one edge in each one. Under the Passes tab, uncheck the Stock to leave option. The first pass is the finishing pass. Geometry tab Heights tab Remember to select Stock bottom as the bottom plane. Next, I mill the dogbone press-fit holes. I select a 6 mm flat end mill with the same feeds and speeds as the chamfering tool. I created one 2D pocket operation for each dogbone pocket. The simulation looks good. I had to enlarge the circles in the dogbone to make sure that the milling bit can enter them. The last operation is to mill all the outline, using the same 6 mm flat endmill, again rotating at 14.000 rpm and moving at 1000 mm/min through the material: The outline milling operation. It's a 2D contour operation, like the chamfering. I had the machine leave rectangular shaped tabs, to keep the parts from moving around while finishing the milling operation.","title":"Computer-Aided Machining"},{"location":"assignments/week07.html#the-shopbot","text":"","title":"The ShopBot"},{"location":"assignments/week07.html#turning-the-machine-on","text":"To be able to turn on the spindle, you need to reset the machine controller. 
Before you do anything in the Shopbot control software, you need to open Tools -> Spindle control. Otherwise the software can't change the spindle speed as the GCode requires. Then you go into Cuts -> Spindle Warmup Routine, to spin the spindle at two speeds for 10 or 15 minutes, to warm up the bearings in the spindle. You go into Cuts -> Zero Z-axis w/ Zzero plate to zero the Z-axis with a conductive metal plate. Then you press K to get the KeyPad window and use the arrow keys on the keyboard to move the spindle to a convenient point before you go into Zero -> Zero [2] axes (X & Y) in the software. Turning on the shop vac just before starting the job.","title":"Turning the machine on"},{"location":"assignments/week07.html#recalibrating-the-shopbot","text":"When I made a fit test with two slightly different clearances, I discovered that both of them were way off the mark. I asked my instructor \u00de\u00f3rarinn what might cause this and he suggested that the gearboxes on the stepper motors might be wearing out. This may result in the steps per inch value changing. So I modeled a 100 mm by 100 mm square with a circular pocket in the middle. My instructor \u00de\u00f3rarinn suggested that I also chamfer the corners, so that I could make diagonal measurements. The resulting square had 100.5 mm sides. That means that I need to use this scaling factor on the steps per inch value. I can change that value in the Unit Values window in the Shopbot control software. \\[ \\frac{100}{100.5} = 0.9950249 \\] Corrected value: \\[ 0.9950 * 97.7487 = \\underline{97.262389} \\; \\textnormal{steps/inch} \\] steps/inch. Let's change the unit values for the X and Y axes in the ShopBot control software: The X and Y unit values were 97.7487 and then I changed them to 97.2624. Now our ShopBot is more accurate.","title":"Recalibrating the ShopBot"},{"location":"assignments/week07.html#test-pieces","text":"I designed a test fit piece with different clearances in Fusion 360. 
These fit tests gave me the confidence to finish the design of the coffee corner and mill all the parts in one go. Hexagon fit test pieces. I also did a chamfering test, here's the CAM simulation: I'm glad I also did this test, because I had selected a chamfering tool that was too small. Both are 90\u00b0 bits, but one is bigger than the other. And that makes a difference when you carve this deep.","title":"Test pieces"},{"location":"assignments/week07.html#milling-the-parts-for-the-coffee-corner","text":"It was exciting to do sophisticated milling like this with confidence. I fixed the plywood sheet with screws in the corners and on the middle of the long sides. I took care to tie my hair into a bun and wear hearing protection and safety glasses. First I milled just the chamfers into the plywood sheet. Then I changed from the chamfer tool to the 6 mm flat end mill and milled the dogbone pockets. After that operation it still didn't look like anything recognizable. Finally I milled the outlines and then I recognized the parts. Top view after milling. Dislodging the sawdust with a screwdriver. Vaccuuming the sawdust. Breaking the tab. Breaking the tabs with a chisel. Sanding the edges. I chamfered a few corners before hammering the parts together, to make sure that they sit flush against each other. My instructor \u00de\u00f3rarinn suggested that I take a little off the corners to compensate for the inner corner radius in the piece that gets hammered into these holes. Hammering the parts together. They fit tightly together and don't require any glue. Our new documentation station. I really get in the zone when I sit here and write documentation. I think it's because I feel like I'm in a caf\u00e9, and that's where I get the best concentration. Look how serious I am! The next time I mill something out of wood using Fusion 360, I may need to use a different tab shape. 
Look at the burn marks after the machine stopped to mill this tab: There are burn marks where all the tabs were. While I was hard at work finishing up the documentation, my instructor \u00de\u00f3rarinn painted the table black and attached it and the shelf to the wall. Bas Withagen in Fab Lab Reykjav\u00edk used to say that if you have time to paint your final project, you're doing it wrong. Fortunately, I have my trusty instructor \u00de\u00f3rarinn, who painted the table black. He's there with his dog S\u00f3la, who was old and had become blind from diabetes. She laid at my desk as I worked on the Fab Academy documentation. S\u00f3la only lived for a week after I took this picture. Such a sweet dog. Our lovely coffee corner with a freshly painted table and \u00de\u00f3rarinn's espresso machine. \u00de\u00f3rarinn commented that now there was really no spot in the lab that wasn't cozy and enticing to sit down and work on a laptop. Download Coffee Corner Fusion 360 model Download 12 mm fit test Fusion 360 model Download 12 mm fit test - more clearance Fusion 360 model Download 12 mm hexagon test Fusion 360 model Download 15 mm fit test Fusion 360 model Download 15 mm chamfer test Fusion 360 model Download rectangle circle test VCarve file Download rectangle circle test - quarter inch bit VCarve file Download rectangle circle test - 22mm MDF VCarve file .md-content__button { display: none; }","title":"Milling the parts for the coffee corner"},{"location":"assignments/week08.html","text":"Electronics Production Vinyl cutting electronics Making a vinyl cutting file in Inkscape I had to go into Inkscape and do a bit of editing of the SVG that I exported out of KiCAD in Electronics Design week . The traces were only single lines, so I couldn't just set the line width to 0.02 mm and start cutting. I used a little trick; I converted the stroke to a path. These lines won't work. Stroke to Path. Now I have lots of lines. Too many, even. But I can work with these. 
Now I had a lot of intersecting shapes, which I was able to combine using Path -> Union. After that, it was only a bit of cleaning up, since there was an extra rectangle left over on each pad. Path -> Union. Removing duplicate lines. I made the lines red, set the line width to 0.02 mm and exported to PDF (File -> Save As and then select PDF as the output). Cutting the circuit Cutting the circuit didn't go perfectly. Some of the pads came loose when the cutter was cutting holes in them. Checking the design in Inkscape, I found that there was an extra circle on top of every hole. So each hole was cut twice. I also realized that I didn't need to solder pins onto the Xiao module, I could surface-mount it! So I deleted all the holes and cut again. This time the circuit came out perfect. I love making circuits on the vinyl cutter! It's so quick and easy. I found a MicroPython blink program to run on the RP2040. All I had to do was to look at the pinout on the Xiao module to see which GPIO pin I had connected to the LED. That's pin 26. I substituted that into the program and pressed play. It works! Here's the blink program in MicroPython: from machine import Pin , Timer led = machine . Pin ( 26 , machine . Pin . OUT ) timer = Timer () def blink ( timer ): led . toggle () timer . init ( freq = 2.5 , mode = Timer . PERIODIC , callback = blink ) I also tried programming it in C++ in the Arduino IDE. Here is my board with the button working: And the Arduino code: #define BUTTON_PIN 29 #define LED_PIN 26 bool status = 0 ; void setup () { Serial . begin ( 9600 ); pinMode ( BUTTON_PIN , INPUT_PULLUP ); pinMode ( LED_PIN , OUTPUT ); } void loop () { status = digitalRead ( BUTTON_PIN ); Serial . println ( status ); if ( status == 1 ) { digitalWrite ( LED_PIN , LOW ); } else { digitalWrite ( LED_PIN , HIGH ); } delay ( 100 ); } PCB milling I decided to make an LED debugging board. I've wanted something like this for a while. 
Sometimes you just want to see if something is happening on the output pin that you've defined. Or you're not sure which pin is which. I think an LED board can help when you're figuring out if the problem is in the circuit or in the code. So I designed a board with 30 LEDs, which covers the whole length of my breadboard: Here I'm placing a simple pattern of LEDs, each with a current limiting resistor. Most of the work took place in the PCB layout environment. I needed to rotate every LED individually and align it and its resistor with the others. I had to do a fair bit of rotating and arranging to get all the resistors and LEDs in line. Then I selected File -> Export SVG and opened the SVG file in Inkscape. Unlike the vinyl cutting file, which needs to be a perfect SVG, what I'm using now is a PNG. So I only need to set the colors of the board and traces right, and that's it! Export to PNG with 1000 dots per inch resolution. The production files are simple PNG images. You can save these and load them into Fab Modules to make your own LED test board. The left one (traces) is milled with a 1/64\" bit and the right one (interior) is milled with a 1/32\" bit to cut the outline of the board. To mill the traces I took the milling bit out of the collet and put the 1/64 inch bit in. Initially the bit should only be poking a little bit out of the collet. When over the PCB and the Z-axis is in zero position, I loosened the milling bit, taking care to keep it from dropping and breaking. Then I lower the bit onto the PCB, thereby zeroing it. I push it gently down with one finger while I tighten the set screw, otherwise the screw can lift the bit slightly as I fasten it. The traces PNG is loaded in Fab Modules, RML code for Roland Modela mills is selected and PCB traces is selected. We have Fab Modules running locally on an ancient Linux machine and I don't know how to take screenshots on it. Sorry. 
Then I select the MDX-20 milling machine, click Calculate to generate the toolpath and click Send to send the RML code to the machine. Starting to mill the traces on the Roland MDX-20 machine. Vacuuming the dust away after milling. Fab Modules selections for milling the PCB traces. Generating the toolpath to mill the PCB outline. The interior PNG was loaded this time and PCB outline selected instead of PCB traces. Did you know you can rotate the view in 3D in Fab Modules? It's great! Milling the board outline with a 1/32 inch bit. I changed the bit with the same procedure as before. After another round of vacuuming, the board is ready for soldering! Soldering, then stopping to test the resistor sizing. First I tried a 1kOhm resistor, which made the LED too dim. Then I tried a 10Ohm resistor, which was just right. Then I soldered the rest of the resistors and LEDs onto the board. I enjoyed the process, but if I were to make many of these boards, I would start thinking about a Pick-and-Place machine. The LumenPNP seems nice. LED debugging I used the LED to troubleshoot my final project. First I made a simple program that blinks all the pins. It helped me to determine which pin on the IC relates to which pin number in the Arduino IDE, using the Arduino Nano 33 IoT board definition (because that's the only Arduino core that worked with my libraries). Blink all the pins! I noticed gaps, where a few pins weren't broken out on the board. I could identify them by slowing the blinking down and having the SAMD21 report the pin number to the serial monitor. Then, when the brushless motor was moving erratically, I added the LED board to the breadboard again to the breadboard to see if I was getting PWM signals on all three motor phases: I got a steady ENABLE signal and two PWM signals. There should also be a PWM signal on the brown wire. I had accidentally used a pin that wasn't broken out on the board for the third PWM signal. 
I quickly changed the pin in the code and the motor spun smoothly. See more in Final Project: Electronics Design . PCB milling test 1/64th inch bit I made Neil Gershenfeld's PCB milling test using the 1/64th inch flat end mill. I used a local instance of the old Fab Modules running on a Debian Linux laptop. I used the standard speed of 4 mm/s and a cut depth of 0.1 mm. Under \"number of offsets\" I put -1, which means that I want to clear the board completely of copper around the traces. The toolpaths in Fab Modules. I wonder how Neil programmed this from scratch. I think these traces came out rather well The depth of cut seems to be good since the copper is completely removed. The edges are relatively smooth, so the milling bit must be in good condition. The very finest traces came loose from the board. That's good to know. It seems to be best not to make traces thinner than 0.3 mm. 0.01 inch bit Then I tried the super thin 0.01 inch flat end mill, and I must admit that I forgot to change the milling speed. So the first attempt was at a fast pace of 4 mm/s. The end mill broke immediately. Then I tried again at a slow speed of 0.5 mm/s and the same cut depth 0.1 mm. It also broke quite quickly. This was frustrating. There are more offsets, since the milling bit is thinner. I broke two 0.01 inch milling bits trying to mill this test file. I waited until the final project to try the 0.01 inch end mill again, then at the very slow speed of 0.1 mm/s. It worked for an hour and then broke in the middle of the night. I documented my frustration in my final project video and in my final project presentation, Neil Gershenfeld mentioned that everything has to be perfect for this milling bit to work. You have to plane the wasteboard, clean the machine, everything has to be just right. And I think I also made the mistake of having it mill all the traces, instead of just around the ICs with the smallest pads. 
In the end I was able to mill the finest traces on my final project board with a V-bit. Then I cleared the whole board with a 1/64th inch flat end mill and milled the holes and outline with a 1/32 inch flat end mill. Here is an assembled robot joint running a PID control loop: Look at that! It works! .md-content__button { display: none; }","title":"8. Electronics Production"},{"location":"assignments/week08.html#electronics-production","text":"","title":"Electronics Production   "},{"location":"assignments/week08.html#vinyl-cutting-electronics","text":"","title":"Vinyl cutting electronics"},{"location":"assignments/week08.html#making-a-vinyl-cutting-file-in-inkscape","text":"I had to go into Inkscape and do a bit of editing of the SVG that I exported out of KiCAD in Electronics Design week . The traces were only single lines, so I couldn't just set the line width to 0.02 mm and start cutting. I used a little trick; I converted the stroke to a path. These lines won't work. Stroke to Path. Now I have lots of lines. Too many, even. But I can work with these. Now I had a lot of intersecting shapes, which I was able to combine using Path -> Union. After that, it was only a bit of cleaning up, since there was an extra rectangle left over on each pad. Path -> Union. Removing duplicate lines. I made the lines red, set the line width to 0.02 mm and exported to PDF (File -> Save As and then select PDF as the output).","title":"Making a vinyl cutting file in Inkscape"},{"location":"assignments/week08.html#cutting-the-circuit","text":"Cutting the circuit didn't go perfectly. Some of the pads came loose when the cutter was cutting holes in them. Checking the design in Inkscape, I found that there was an extra circle on top of every hole. So each hole was cut twice. I also realized that I didn't need to solder pins onto the Xiao module, I could surface-mount it! So I deleted all the holes and cut again. This time the circuit came out perfect. 
I love making circuits on the vinyl cutter! It's so quick and easy. I found a MicroPython blink program to run on the RP2040. All I had to do was to look at the pinout on the Xiao module to see which GPIO pin I had connected to the LED. That's pin 26. I substituted that into the program and pressed play. It works! Here's the blink program in MicroPython: from machine import Pin , Timer led = machine . Pin ( 26 , machine . Pin . OUT ) timer = Timer () def blink ( timer ): led . toggle () timer . init ( freq = 2.5 , mode = Timer . PERIODIC , callback = blink ) I also tried programming it in C++ in the Arduino IDE. Here is my board with the button working: And the Arduino code: #define BUTTON_PIN 29 #define LED_PIN 26 bool status = 0 ; void setup () { Serial . begin ( 9600 ); pinMode ( BUTTON_PIN , INPUT_PULLUP ); pinMode ( LED_PIN , OUTPUT ); } void loop () { status = digitalRead ( BUTTON_PIN ); Serial . println ( status ); if ( status == 1 ) { digitalWrite ( LED_PIN , LOW ); } else { digitalWrite ( LED_PIN , HIGH ); } delay ( 100 ); }","title":"Cutting the circuit"},{"location":"assignments/week08.html#pcb-milling","text":"I decided to make an LED debugging board. I've wanted something like this for a while. Sometimes you just want to see if something is happening on the output pin that you've defined. Or you're not sure which pin is which. I think an LED board can help when you're figuring out if the problem is in the circuit or in the code. So I designed a board with 30 LEDs, which covers the whole length of my breadboard: Here I'm placing a simple pattern of LEDs, each with a current limiting resistor. Most of the work took place in the PCB layout environment. I needed to rotate every LED individually and align it and its resistor with the others. I had to do a fair bit of rotating and arranging to get all the resistors and LEDs in line. Then I selected File -> Export SVG and opened the SVG file in Inkscape. 
Unlike the vinyl cutting file, which needs to be a perfect SVG, what I'm using now is a PNG. So I only need to set the colors of the board and traces right, and that's it! Export to PNG with 1000 dots per inch resolution. The production files are simple PNG images. You can save these and load them into Fab Modules to make your own LED test board. The left one (traces) is milled with a 1/64\" bit and the right one (interior) is milled with a 1/32\" bit to cut the outline of the board. To mill the traces I took the milling bit out of the collet and put the 1/64 inch bit in. Initially the bit should only be poking a little bit out of the collet. When over the PCB and the Z-axis is in zero position, I loosened the milling bit, taking care to keep it from dropping and breaking. Then I lower the bit onto the PCB, thereby zeroing it. I push it gently down with one finger while I tighten the set screw, otherwise the screw can lift the bit slightly as I fasten it. The traces PNG is loaded in Fab Modules, RML code for Roland Modela mills is selected and PCB traces is selected. We have Fab Modules running locally on an ancient Linux machine and I don't know how to take screenshots on it. Sorry. Then I select the MDX-20 milling machine, click Calculate to generate the toolpath and click Send to send the RML code to the machine. Starting to mill the traces on the Roland MDX-20 machine. Vacuuming the dust away after milling. Fab Modules selections for milling the PCB traces. Generating the toolpath to mill the PCB outline. The interior PNG was loaded this time and PCB outline selected instead of PCB traces. Did you know you can rotate the view in 3D in Fab Modules? It's great! Milling the board outline with a 1/32 inch bit. I changed the bit with the same procedure as before. After another round of vacuuming, the board is ready for soldering! Soldering, then stopping to test the resistor sizing. First I tried a 1kOhm resistor, which made the LED too dim. 
Then I tried a 10Ohm resistor, which was just right. Then I soldered the rest of the resistors and LEDs onto the board. I enjoyed the process, but if I were to make many of these boards, I would start thinking about a Pick-and-Place machine. The LumenPNP seems nice.","title":"PCB milling"},{"location":"assignments/week08.html#led-debugging","text":"I used the LED to troubleshoot my final project. First I made a simple program that blinks all the pins. It helped me to determine which pin on the IC relates to which pin number in the Arduino IDE, using the Arduino Nano 33 IoT board definition (because that's the only Arduino core that worked with my libraries). Blink all the pins! I noticed gaps, where a few pins weren't broken out on the board. I could identify them by slowing the blinking down and having the SAMD21 report the pin number to the serial monitor. Then, when the brushless motor was moving erratically, I added the LED board to the breadboard again to the breadboard to see if I was getting PWM signals on all three motor phases: I got a steady ENABLE signal and two PWM signals. There should also be a PWM signal on the brown wire. I had accidentally used a pin that wasn't broken out on the board for the third PWM signal. I quickly changed the pin in the code and the motor spun smoothly. See more in Final Project: Electronics Design .","title":"LED debugging"},{"location":"assignments/week08.html#pcb-milling-test","text":"","title":"PCB milling test"},{"location":"assignments/week08.html#164th-inch-bit","text":"I made Neil Gershenfeld's PCB milling test using the 1/64th inch flat end mill. I used a local instance of the old Fab Modules running on a Debian Linux laptop. I used the standard speed of 4 mm/s and a cut depth of 0.1 mm. Under \"number of offsets\" I put -1, which means that I want to clear the board completely of copper around the traces. The toolpaths in Fab Modules. I wonder how Neil programmed this from scratch. 
I think these traces came out rather well. The depth of cut seems to be good since the copper is completely removed. The edges are relatively smooth, so the milling bit must be in good condition. The very finest traces came loose from the board. That's good to know. It seems to be best not to make traces thinner than 0.3 mm.","title":"1/64th inch bit"},{"location":"assignments/week08.html#001-inch-bit","text":"Then I tried the super thin 0.01 inch flat end mill, and I must admit that I forgot to change the milling speed. So the first attempt was at a fast pace of 4 mm/s. The end mill broke immediately. Then I tried again at a slow speed of 0.5 mm/s and the same cut depth 0.1 mm. It also broke quite quickly. This was frustrating. There are more offsets, since the milling bit is thinner. I broke two 0.01 inch milling bits trying to mill this test file. I waited until the final project to try the 0.01 inch end mill again, then at the very slow speed of 0.1 mm/s. It worked for an hour and then broke in the middle of the night. I documented my frustration in my final project video and in my final project presentation, Neil Gershenfeld mentioned that everything has to be perfect for this milling bit to work. You have to plane the wasteboard, clean the machine, everything has to be just right. And I think I also made the mistake of having it mill all the traces, instead of just around the ICs with the smallest pads. In the end I was able to mill the finest traces on my final project board with a V-bit. Then I cleared the whole board with a 1/64th inch flat end mill and milled the holes and outline with a 1/32 inch flat end mill. Here is an assembled robot joint running a PID control loop: Look at that! It works! .md-content__button { display: none; }","title":"0.01 inch bit"},{"location":"assignments/week09.html","text":"Output Devices Neil's board I first made Neil Gershenfeld's Hello H-Bridge D11C board without modifications. 
I simply downloaded the traces and interior PNG files and used them to generate G-code to mill the board. After milling, I removed the extra copper around the USB connector, to avoid a short-circuit. Neil's board in Fab Modules. I like soldering. Maybe you can tell. I like the fact that the SAMD11 is a capable ARM microcontroller that still comes in a package that is easy to solder. After soldering, I plugged the USB connector on the board into my computer and hooked the SWD pins up to the Atmel-ICE programmer. I followed this tutorial from Fab Lab Kannai to try to upload a bootloader to the chip. Trying to program the ATSAMD11C with the official ATMEL-ICE programmer. The microcontroller was not detected. The Microchip Studio software couldn't find a connected device. I tried again, and got a low voltage warning. It said that the operating voltage was 1.5 volts but needed to be 1.6 to 3.8 volts. Well, that's an improvement! Now I have a useful error message that I may be able to do something about. Later. At least the computer is detecting something. Stepper control board Fusion 360 PCB export woes For my own design, I decided to go an easier route. I used the Xiao RP2040 module, which I know how to program. After the incredible fireworks show in the Monday recitation , where the Fab Lab machine builders showed their projects, I looked into the Urumbu boards and Modular Things. Urumbu is based on work at a Fab event in Kerala, where it was discovered that you can connect several stepper control boards directly to a computer and send them synced commands via USB and have them act as a single machine. Modular Things grew out of that project, and they include a convenient web editor to program the boards with a few lines of JavaScript. I looked at the Urumbu boards and found that they used the DRV8428 stepper controllers. We only have two of them, and they are currently unavailable at Digikey. 
However, the latest Modular Things boards use a Xiao RP2040 for control, which I'm comfortable with, and the stepper board uses two A4950 motor controllers, which we have at our Lab. Alright! These boards are designed in Fusion 360 and I want to make some modifications to it. I opened the stepper board in Fusion 360 but I couldn't make heads or tails of the interface of the electronics environment. So I started going through a friendly five video tutorial series on Fusion 360 PCB design and milling. The first video covers making a new design, getting the libraries you need and opening the PCB layout. The video was made only a year ago but the interface has already changed since then. But I was able to follow it. In the second video you make your own library component from scratch. I needed to change the default grid units from mils to millimeters first. That is a global setting, so I should now see millimeters everywhere in the electronics design environment. In the third video, you make a more complicated component from the manufacturer's 3D CAD file and technical drawing. When I made a new component, I had to set the grid units to mm again. Annoying. I followed step 5 on this Fab Academy site to export the circuit as a PNG. That worked well for the traces, but no matter what I tried, I couldn't export the outline of the board. It's always visible in the viewport, even when I turn off all the layers. So instead, I tried opening the 3D PCB and exporting the PCB sketch as a DXF and then turning that into a black and white SVG in Inkscape. That works, except I need to draw a frame around it, so that the milling bit has space to traverse the whole outline. But then, how do I make the frame for the traces? I tried to export them as a DXF, but that didn't work. For that, I would need to create another sketch inside the 3D PCB environment and project the traces into it, but that environment only allows you to make one sketch. Then I tried to make an engineering drawing. 
Only the board outline and the pads appeared but not the traces. And not the frame around the board, because it only exists in a sketch. Then I changed the settings for the model view from Visible Lines to Visible with Hidden Edges, and the traces appeared! But they had broken lines. So I right-clicked the Document Settings at the top of the model tree on the left and looked at the View settings. The hidden lines were drawn with HIDDEN2. I changed that to Continuous, and now I had nice and continuous traces. I exported the drawing as a PDF and opened it in Inkscape. I deleted the CAD drawing frame, which I don't need to make production files. Now I just needed to do some editing to combine the traces with the pads and make one file with only the traces and another one with only the holes and the board outline. I made all the lines red and 0.02 mm wide, just because that's what we usually do before laser cutting and vinyl cutting. I'm used to looking at things like that. I tried turning on Fill, but that didn't come out well. So I had to do some editing. I selected Object -> Ungroup twice to separate all the lines. I thought that I was getting close to production. But a lot of the lines weren't connected. Sigh. I can't use this. Next I tried exporting the CAD drawing as a DXF and opened it in Inkscape, in the hope that it would be more precise. It is more precise, but the lines are still all separate. The pads and traces aren't objects that I can combine. I tried turning Fill on, but had no better luck than with the PDF. To make the background, I made a rectangle with black fill and no stroke, and had it snap to the corners of the broken sketch outline. Nicely milled stepper control board. When I was looking through the Arduino code for the stepper H-bridge RP2040, I found the pin that sends a reference voltage to the Toshiba driver. It was in the stepperDriver.cpp file. 
Connected to the pin was a somewhat cryptic slice_num variable, but from the RP2040 datasheet I remembered that the PWM generators are called slices. From the following lines of code, it seemed that the PWM duty cycle was 15/128, or 12%: cpp // PWM duty cycle over 128 pwm_set_chan_level(slice_num_a, channel_a, 15); pwm_set_chan_level(slice_num_b, channel_b, 15); If I assume that the maximum output voltage of the RP2040 is 3.3V, then 12% of that is 0,396V. I can try to implement this with the Arduino library function analogWrite(pin, value), where value takes a number from 0 (always off) to 255 (always on). 12% of 255 is 31. The DC motor hummed, but didn't move. So I tried 50/255. Then it moved a tiny bit. Next, I tried 80/255. The motor spins! And I don't seem to be overloading the USB port. But the motor is very weak. Let's try 100. Now 120. Now 150. Now 180. Now 200. Now 220. I won't dare to go above that for now. Two DC motors, VREF 80. Works! Let's try 120. I tried 150, 180 and finally 220. I also tried PWM on both motors at the same time. That worked well. Now I'm gearing up for a BLDC motor. But that requires a boost converter to get the voltage from 5 to 12 V. Final project spiral 1 Then I tried Yuichi Tamiya's Modular Things stepper board. Yuichi Tamiya's Modular Things stepper board from the 2023 instructor bootcamp in Amsterdam. I heavily modified Yuichi's board, changing the shape and adding header pins, so that I could use all the Xiao RP2040's pins. I can now connect the brushless motor to the two H-bridges (it needs one and a half H-bridge) and I can connect the encoder to 3.3 V, ground and a digital pin on the Xiao. I bought two sizes of brushless motors, thinking that I would use a bigger motor in the base. I based the shape of the board on that. A few days after I designed the shape, I decided to change the orientation of the arm from vertical (regular robot arm) to horizontal (SCARA arm). 
Then there's no strain on the motors when the arm is stationary and I don't need to use the bigger and more expensive brushless motor. I also decided to put a stepper motor in the base of the first version of the robot, simply because I had managed to make a working stepper RP2040 Modular Thing. The Autorouter in KiCAD. My PCB design My robot joint v1 PCB. I also added one header pin to the current sense resistor, hoping that I can read the current going into the motor. That would be very useful, because it's a way to prevent the H-bridges from overheating (I burned a motor driver on a commercial robot arm once and I want to make it impossible on my arm) and I can also use the measured current as a way to measure the force on the joint. Current sensing is not available on any hobby robot that I know of, so if this works, then it will be a great feature! I also added a 7-11V power input for the brushless motor. Yuichi's stepper board uses the 5V USB pin to power the stepper, but my brushless motor needs a higher voltage. I will just be using a lab power supply for now. I will figure out the arm's power supply later. Does it make sense to add a boost converter? I don't know, converting 230V AC into 5V and then converting 5V into 11V sounds a bit messy to me. Putting Dupont connectors on the motor The power connector that came with the motor is too small for the standard 2.54 mm pin headers in the Fab Lab Inventory, so my instructor \u00de\u00f3rarinn showed me how to crimp Dupont connectors onto the wires. Part 1 Part 2 Part 3 Part 4 Part 5 Small connector. \u00de\u00f3rarinn's Dupont connector kit. Aligning a female Dupont connector to the wire. The first crimp connection grabs the plastic cover and the second one grabs the bare wire and secures an electrical connection. Crimping the connector onto the wire. More recently I've started to use narrow nose pliers instead. Then I can control exactly how the crimping goes and I don't waste as many Dupont connectors. 
Triple Dupont connector, ready for service. PCB production Under number of offsets (off screen) I typed -1, to have the milling machine clear all the excess copper off the board. I thought this was the safest move, since I'll be putting a BLDC motor with an aluminum chassis onto the board. That's a nice-looking board. The components for arm joint v1, with a general comment on component labeling. Scaling problem The holes for the brushless motor screws were too far apart. How could that be? I exported the arm profile with the holes directly to DXF from Fusion 360, imported them into KiCAD and then exported to SVG without modifications. My instructor \u00de\u00f3rarinn suggested that my DPI settings in Inkscape and Fab Modules might be off. If you check the Fab Modules image, you'll see that the resolution was automatically set to 999.99 dots per inch, instead of 1000. Oh no, torn motor pins! I tore the motor pins off the board when I was trying to insert the connector. The copper also came off the board. This was a design lesson: you have to put the connectors all the way at the edge of the board! I don't know what I was thinking. This was very frustrating. I had to stop working, cool off and come back the next day. With a level head, I thought that I might actually be able to save this board using the adhesive-backed copper sheet that I use on the vinyl cutter. The fix Part 1 Part 2 Part 3 Part 4 Part 5 First I cut the MOTOR OUTPUT and CURRENT SENSE letters off the board with a box cutter. Then I tried cutting a strip of copper sheet and I successfully glued it onto the board. Copper sheet added for the other three motor phases. Then I carefully soldered the horizontal header pins onto the copper sheet and made a solder bridge from the sheets to the traces on the board. Finally I added some hot glue to add a little bit of strength. Robot base The 3D printed base for spiral 1 of my robot arm. The support material came easily away from the part in one piece. Neat! 
Here's the Stepper RP2040 Modular Thing that I made for the stepper in the base of the arm. Look closely and you'll see the tiny white TPU washers that I made to avoid making contact between the screws and the traces. Stepper control with my board Driving a stepper from the Modular Things web interface using my arm joint control board. When testing my arm joint v1 with a stepper motor, I accidentally ripped the stepper motor pin header off the board and took some of the traces along with it. A current sense header pin also fell off the board. I decided to call it quits with making stuff for the day, went to the Heimabygg\u00f0 coffee house and wrote up my experiences. With fresh eyes (and a fresh espresso) at the lab the next morning, I thought of a way to fix the board. I would cut strips of adhesive-backed copper sheet and glue new traces onto the board. I soldered them to the remains of the old traces on one end and to the header pins on the other end, and after a bit of troubleshooting, the board worked! I've tried 247, 427, 274, 472, 742, 724 - that covers all possible \\(3! = 6\\) combinations. I'm getting PWM output on Xiao pins 0, 2 and 4. Now I know that the right pins are 7, 2 and 4. I get good PWM output for the motor from pins 2 and 4 but I get the strange sawtooth output from pin 7. LED PWM test Before trying to move the brushless motor, I checked whether I was getting a sinusoidal PWM on three output pins. I did this by outputting the motor control signals to the RGB LED that is built into the Xiao RP2040 board: Seems to be working! BLDC control with my board Robot arm spiral 1. Here I'm controlling the BLDC with sinusoidal PWM signals: Getting some erratic behavior. This code worked with the L298N stepper driver. After trying a few different speeds and voltages, I finally got the motor to spin around in a circle in the last shot. Debugging The brushless motor moved erratically no matter what I tried. 
I wondered if I had soldered the wrong capacitors onto the board. I tried to measure them with a component tester: Trying to measure a capacitor with a component tester. I couldn't get a reading with the component tester. Eventually I decided that I must have put the right capacitors on the board because I was so systematic and methodical in soldering the board. Finally, I tried lowering the power supply voltage to 5V. The motor still worked. Then I switched the motor over to Yuichi's Modular Things stepper driver and found erratic behavior there too. It seems that this Toshiba motor driver just doesn't cut it. I then connected the motor to the ancient L298N double H-bridge and it worked! OK, so the Toshiba H-bridge is out and I need to look for an alternative. Looking at the signals from the H-bridges. The board can control a stepper just fine. When trying to control a brushless motor, one H-bridge is a problem. It's the one that has only one pin connected. It seems that these motor drivers don't have independent half-H-bridges, which is what I need for brushless motor control. I'm going to abandon this board. I also noticed a lot of compliance in the structure. It seems to stem mostly from the stepper coupling that I designed. This is something I can improve in the next spiral. See the arm bending here: Measuring the power of an output device I measured the power use of an OLED screen. First, I measured the voltage over the component. That means that one lead of the multimeter is on the \"hot\" side of the component and the other lead is connected to ground on the other side. The OLED must be powered on for the voltage measurement to work. Measuring the voltage that the OLED screen gets. Then I measured the current that the OLED screen uses. I needed to break the circuit and insert the multimeter into the circuit on the \"hot\" side, in order to measure the current flowing through the OLED. Inside the multimeter is a resistor with very low resistance. 
The multimeter measures the voltage drop over the resistor and uses that value and the resistance to calculate the current using Ohm's Law. On the left, the potentiometer is turned all the way down, so the bar is black. On the right the pot is turned all the way up, so the bar is white. There is a clear difference in the current reading. Measuring roughly the maximum current that the OLED screen uses. About 90% of the OLED screen is illuminated here. To calculate the power consumption, I'll use the power formula: \\[ \\mathrm{P} = \\mathrm{V}\\!\\cdot\\!\\mathrm{I}\\] Potentiometer set to 0%: \\[\\mathrm{P_{0\\%}}=\\mathrm{V}\\!\\cdot\\!\\mathrm{I_{0\\%}}=4.6V\\!\\cdot\\!0.004A=\\underline{0.0184W}\\] Potentiometer set to 100%: \\[\\mathrm{P_{100\\%}} = \\mathrm{V}\\!\\cdot\\!\\mathrm{I_{100\\%}} = 4.6V\\!\\cdot\\!0.008A = \\underline{0.0368W}\\] Maximum OLED power consumption: \\[\\mathrm{P_{max}} = \\mathrm{V}\\!\\cdot\\!\\mathrm{I_{max}} = 4.6V\\!\\cdot\\!0.024A = \\underline{0.1104W}\\] .md-content__button { display: none; }","title":"9. Output Devices"},{"location":"assignments/week09.html#output-devices","text":"","title":"Output Devices   "},{"location":"assignments/week09.html#neils-board","text":"I first made Neil Gershenfeld's Hello H-Bridge D11C board without modifications. I simply downloaded the traces and interior PNG files and used them to generate G-code to mill the board. After milling, I removed the extra copper around the USB connector, to avoid a short-circuit. Neil's board in Fab Modules. I like soldering. Maybe you can tell. I like the fact that the SAMD11 is a capable ARM microcontroller that still comes in a package that is easy to solder. After soldering, I plugged the USB connector on the board into my computer and hooked the SWD pins up to the Atmel-ICE programmer. I followed this tutorial from Fab Lab Kannai to try to upload a bootloader to the chip. Trying to program the ATSAMD11C with the official ATMEL-ICE programmer. 
The microcontroller was not detected. The Microchip Studio software couldn't find a connected device. I tried again, and got a low voltage warning. It said that the operating voltage was 1.5 volts but needed to be 1.6 to 3.8 volts. Well, that's an improvement! Now I have a useful error message that I may be able to do something about. Later. At least the computer is detecting something.","title":"Neil's board"},{"location":"assignments/week09.html#stepper-control-board","text":"","title":"Stepper control board"},{"location":"assignments/week09.html#fusion-360-pcb-export-woes","text":"For my own design, I decided to go an easier route. I used the Xiao RP2040 module, which I know how to program. After the incredible fireworks show in the Monday recitation , where the Fab Lab machine builders showed their projects, I looked into the Urumbu boards and Modular Things. Urumbu is based on work at a Fab event in Kerala, where it was discovered that you can connect several stepper control boards directly to a computer and send them synced commands via USB and have them act as a single machine. Modular Things grew out of that project, and they include a convenient web editor to program the boards with a few lines of JavaScript. I looked at the Urumbu boards and found that they used the DRV8428 stepper controllers. We only have two of them, and they are currently unavailable at Digikey. However, the latest Modular Things boards use a Xiao RP2040 for control, which I'm comfortable with, and the stepper board uses two A4950 motor controllers, which we have at our Lab. Alright! These boards are designed in Fusion 360 and I want to make some modifications to it. I opened the stepper board in Fusion 360 but I couldn't make heads or tails of the interface of the electronics environment. So I started going through a friendly five video tutorial series on Fusion 360 PCB design and milling. 
The first video covers making a new design, getting the libraries you need and opening the PCB layout. The video was made only a year ago but the interface has already changed since then. But I was able to follow it. In the second video you make your own library component from scratch. I needed to change the default grid units from mils to millimeters first. That is a global setting, so I should now see millimeters everywhere in the electronics design environment. In the third video, you make a more complicated component from the manufacturer's 3D CAD file and technical drawing. When I made a new component, I had to set the grid units to mm again. Annoying. I followed step 5 on this Fab Academy site to export the circuit as a PNG. That worked well for the traces, but no matter what I tried, I couldn't export the outline of the board. It's always visible in the viewport, even when I turn off all the layers. So instead, I tried opening the 3D PCB and exporting the PCB sketch as a DXF and then turning that into a black and white SVG in Inkscape. That works, except I need to draw a frame around it, so that the milling bit has space to traverse the whole outline. But then, how do I make the frame for the traces? I tried to export them as a DXF, but that didn't work. For that, I would need to create another sketch inside the 3D PCB environment and project the traces into it, but that environment only allows you to make one sketch. Then I tried to make an engineering drawing. Only the board outline and the pads appeared but not the traces. And not the frame around the board, because it only exists in a sketch. Then I changed the settings for the model view from Visible Lines to Visible with Hidden Edges, and the traces appeared! But they had broken lines. So I right-clicked the Document Settings at the top of the model tree on the left and looked at the View settings. The hidden lines were drawn with HIDDEN2. 
I changed that to Continuous, and now I had nice and continuous traces. I exported the drawing as a PDF and opened it in Inkscape. I deleted the CAD drawing frame, which I don't need to make production files. Now I just needed to do some editing to combine the traces with the pads and make one file with only the traces and another one with only the holes and the board outline. I made all the lines red and 0.02 mm wide, just because that's what we usually do before laser cutting and vinyl cutting. I'm used to looking at things like that. I tried turning on Fill, but that didn't come out well. So I had to do some editing. I selected Object -> Ungroup twice to separate all the lines. I thought that I was getting close to production. But a lot of the lines weren't connected. Sigh. I can't use this. Next I tried exporting the CAD drawing as a DXF and opened it in Inkscape, in the hope that it would be more precise. It is more precise, but the lines are still all separate. The pads and traces aren't objects that I can combine. I tried turning Fill on, but had no better luck than with the PDF. To make the background, I made a rectangle with black fill and no stroke, and had it snap to the corners of the broken sketch outline. Nicely milled stepper control board. When I was looking through the Arduino code for the stepper H-bridge RP2040, I found the pin that sends a reference voltage to the Toshiba driver. It was in the stepperDriver.cpp file. Connected to the pin was a somewhat cryptic slice_num variable, but from the RP2040 datasheet I remembered that the PWM generators are called slices. From the following lines of code, it seemed that the PWM duty cycle was 15/128, or 12%: cpp // PWM duty cycle over 128 pwm_set_chan_level(slice_num_a, channel_a, 15); pwm_set_chan_level(slice_num_b, channel_b, 15); If I assume that the maximum output voltage of the RP2040 is 3.3V, then 12% of that is 0.396V. 
I can try to implement this with the Arduino library function analogWrite(pin, value), where value takes a number from 0 (always off) to 255 (always on). 12% of 255 is 31. The DC motor hummed, but didn't move. So I tried 50/255. Then it moved a tiny bit. Next, I tried 80/255. The motor spins! And I don't seem to be overloading the USB port. But the motor is very weak. Let's try 100. Now 120. Now 150. Now 180. Now 200. Now 220. I won't dare to go above that for now. Two DC motors, VREF 80. Works! Let's try 120. I tried 150, 180 and finally 220. I also tried PWM on both motors at the same time. That worked well. Now I'm gearing up for a BLDC motor. But that requires a boost converter to get the voltage from 5 to 12 V.","title":"Fusion 360 PCB export woes"},{"location":"assignments/week09.html#final-project-spiral-1","text":"Then I tried Yuichi Tamiya's Modular Things stepper board. Yuichi Tamiya's Modular Things stepper board from the 2023 instructor bootcamp in Amsterdam. I heavily modified Yuichi's board, changing the shape and adding header pins, so that I could use all the Xiao RP2040's pins. I can now connect the brushless motor to the two H-bridges (it needs one and a half H-bridge) and I can connect the encoder to 3.3 V, ground and a digital pin on the Xiao. I bought two sizes of brushless motors, thinking that I would use a bigger motor in the base. I based the shape of the board on that. A few days after I designed the shape, I decided to change the orientation of the arm from vertical (regular robot arm) to horizontal (SCARA arm). Then there's no strain on the motors when the arm is stationary and I don't need to use the bigger and more expensive brushless motor. I also decided to put a stepper motor in the base of the first version of the robot, simply because I had managed to make a working stepper RP2040 Modular Thing. 
The Autorouter in KiCAD.","title":"Final project spiral 1"},{"location":"assignments/week09.html#my-pcb-design","text":"My robot joint v1 PCB. I also added one header pin to the current sense resistor, hoping that I can read the current going into the motor. That would be very useful, because it's a way to prevent the H-bridges from overheating (I burned a motor driver on a commercial robot arm once and I want to make it impossible on my arm) and I can also use the measured current as a way to measure the force on the joint. Current sensing is not available on any hobby robot that I know of, so if this works, then it will be a great feature! I also added a 7-11V power input for the brushless motor. Yuichi's stepper board uses the 5V USB pin to power the stepper, but my brushless motor needs a higher voltage. I will just be using a lab power supply for now. I will figure out the arm's power supply later. Does it make sense to add a boost converter? I don't know, converting 230V AC into 5V and then converting 5V into 11V sounds a bit messy to me.","title":"My PCB design"},{"location":"assignments/week09.html#putting-dupont-connectors-on-the-motor","text":"The power connector that came with the motor is too small for the standard 2.54 mm pin headers in the Fab Lab Inventory, so my instructor \u00de\u00f3rarinn showed me how to crimp Dupont connectors onto the wires. Part 1 Part 2 Part 3 Part 4 Part 5 Small connector. \u00de\u00f3rarinn's Dupont connector kit. Aligning a female Dupont connector to the wire. The first crimp connection grabs the plastic cover and the second one grabs the bare wire and secures an electrical connection. Crimping the connector onto the wire. More recently I've started to use narrow nose pliers instead. Then I can control exactly how the crimping goes and I don't waste as many Dupont connectors. 
Triple Dupont connector, ready for service.","title":"Putting Dupont connectors on the motor"},{"location":"assignments/week09.html#pcb-production","text":"Under number of offsets (off screen) I typed -1, to have the milling machine clear all the excess copper off the board. I thought this was the safest move, since I'll be putting a BLDC motor with an aluminum chassis onto the board. That's a nice-looking board. The components for arm joint v1, with a general comment on component labeling.","title":"PCB production"},{"location":"assignments/week09.html#scaling-problem","text":"The holes for the brushless motor screws were too far apart. How could that be? I exported the arm profile with the holes directly to DXF from Fusion 360, imported them into KiCAD and then exported to SVG without modifications. My instructor \u00de\u00f3rarinn suggested that my DPI settings in Inkscape and Fab Modules might be off. If you check the Fab Modules image, you'll see that the resolution was automatically set to 999.99 dots per inch, instead of 1000.","title":"Scaling problem"},{"location":"assignments/week09.html#oh-no-torn-motor-pins","text":"I tore the motor pins off the board when I was trying to insert the connector. The copper also came off the board. This was a design lesson: you have to put the connectors all the way at the edge of the board! I don't know what I was thinking. This was very frustrating. I had to stop working, cool off and come back the next day. With a level head, I thought that I might actually be able to save this board using the adhesive-backed copper sheet that I use on the vinyl cutter.","title":"Oh no, torn motor pins!"},{"location":"assignments/week09.html#the-fix","text":"Part 1 Part 2 Part 3 Part 4 Part 5 First I cut the MOTOR OUTPUT and CURRENT SENSE letters off the board with a box cutter. Then I tried cutting a strip of copper sheet and I successfully glued it onto the board. Copper sheet added for the other three motor phases. 
Then I carefully soldered the horizontal header pins onto the copper sheet and made a solder bridge from the sheets to the traces on the board. Finally I added some hot glue to add a little bit of strength.","title":"The fix"},{"location":"assignments/week09.html#robot-base","text":"The 3D printed base for spiral 1 of my robot arm. The support material came easily away from the part in one piece. Neat! Here's the Stepper RP2040 Modular Thing that I made for the stepper in the base of the arm. Look closely and you'll see the tiny white TPU washers that I made to avoid making contact between the screws and the traces.","title":"Robot base"},{"location":"assignments/week09.html#stepper-control-with-my-board","text":"Driving a stepper from the Modular Things web interface using my arm joint control board. When testing my arm joint v1 with a stepper motor, I accidentally ripped the stepper motor pin header off the board and took some of the traces along with it. A current sense header pin also fell off the board. I decided to call it quits with making stuff for the day, went to the Heimabygg\u00f0 coffee house and wrote up my experiences. With fresh eyes (and a fresh espresso) at the lab the next morning, I thought of a way to fix the board. I would cut strips of adhesive-backed copper sheet and glue new traces onto the board. I soldered them to the remains of the old traces on one end and to the header pins on the other end, and after a bit of troubleshooting, the board worked! I've tried 247, 427, 274, 472, 742, 724 - that covers all possible \\(3! = 6\\) combinations. I'm getting PWM output on Xiao pins 0, 2 and 4. Now I know that the right pins are 7, 2 and 4. 
I get good PWM output for the motor from pins 2 and 4 but I get the strange sawtooth output from pin 7.","title":"Stepper control with my board"},{"location":"assignments/week09.html#led-pwm-test","text":"Before trying to move the brushless motor, I checked whether I was getting a sinusoidal PWM on three output pins. I did this by outputting the motor control signals to the RGB LED that is built into the Xiao RP2040 board: Seems to be working!","title":"LED PWM test"},{"location":"assignments/week09.html#bldc-control-with-my-board","text":"Robot arm spiral 1. Here I'm controlling the BLDC with sinusoidal PWM signals: Getting some erratic behavior. This code worked with the L298N stepper driver. After trying a few different speeds and voltages, I finally got the motor to spin around in a circle in the last shot.","title":"BLDC control with my board"},{"location":"assignments/week09.html#debugging","text":"The brushless motor moved erratically no matter what I tried. I wondered if I had soldered the wrong capacitors onto the board. I tried to measure them with a component tester: Trying to measure a capacitor with a component tester. I couldn't get a reading with the component tester. Eventually I decided that I must have put the right capacitors on the board because I was so systematic and methodical in soldering the board. Finally, I tried lowering the power supply voltage to 5V. The motor still worked. Then I switched the motor over to Yuichi's Modular Things stepper driver and found erratic behavior there too. It seems that this Toshiba motor driver just doesn't cut it. I then connected the motor to the ancient L298N double H-bridge and it worked! OK, so the Toshiba H-bridge is out and I need to look for an alternative. Looking at the signals from the H-bridges. The board can control a stepper just fine. When trying to control a brushless motor, one H-bridge is a problem. It's the one that has only one pin connected. 
It seems that these motor drivers don't have independent half-H-bridges, which is what I need for brushless motor control. I'm going to abandon this board. I also noticed a lot of compliance in the structure. It seems to stem mostly from the stepper coupling that I designed. This is something I can improve in the next spiral. See the arm bending here:","title":"Debugging"},{"location":"assignments/week09.html#measuring-the-power-of-an-output-device","text":"I measured the power use of an OLED screen. First, I measured the voltage over the component. That means that one lead of the multimeter is on the \"hot\" side of the component and the other lead is connected to ground on the other side. The OLED must be powered on for the voltage measurement to work. Measuring the voltage that the OLED screen gets. Then I measured the current that the OLED screen uses. I needed to break the circuit and insert the multimeter into the circuit on the \"hot\" side, in order to measure the current flowing through the OLED. Inside the multimeter is a resistor with very low resistance. The multimeter measures the voltage drop over the resistor and uses that value and the resistance to calculate the current using Ohm's Law. On the left, the potentiometer is turned all the way down, so the bar is black. On the right the pot is turned all the way up, so the bar is white. There is a clear difference in the current reading. Measuring roughly the maximum current that the OLED screen uses. About 90% of the OLED screen is is illuminated here. 
To calculate the power consumption, I'll use the power formula: \\[ \\mathrm{P} = \\mathrm{V}\\!\\cdot\\!\\mathrm{I}\\] Potentiometer set to 0%: \\[\\mathrm{P_{0\\%}}=\\mathrm{V}\\!\\cdot\\!\\mathrm{I_{0\\%}}=4.6V\\!\\cdot\\!0.004A=\\underline{0.0184W}\\] Potentiometer set to 100%: \\[\\mathrm{P_{100\\%}} = \\mathrm{V}\\!\\cdot\\!\\mathrm{I_{100\\%}} = 4.6V\\!\\cdot\\!0.008A = \\underline{0.0368W}\\] Maximum OLED power consumption: \\[\\mathrm{P_{max}} = \\mathrm{V}\\!\\cdot\\!\\mathrm{I_{max}} = 4.6V\\!\\cdot\\!0.024A = \\underline{0.1104W}\\] .md-content__button { display: none; }","title":"Measuring the power of an output device"},{"location":"assignments/week10.html","text":"Machine Week The Icelandic Machine Week team: Svavar, Andri and Hafey. Machine Week was fantastic. Check out our project page here: MACHINE WEEK PAGE For an intense three-day weekend, Andri S\u00e6mundsson , Hafey Viktor\u00eda Hallgr\u00edmsd\u00f3ttir and myself joined forces in Fab Lab Reykjav\u00edk to build a machine. Here's our presentation video: The build process and function of the TeaManator tea steeping machine. But it's better to watch the video with commentary from Hafey: Hafey presenting the TeaManator tea machine to Neil Gershenfeld and the Fab Academy class 2023. Neil liked how the machine is nicely finished and presented. Link to video. The clean aesthetics of the machine are thanks to Hafey, who designed and made the structure on the Shopbot and the laser cutter. She also made the logo on the vinyl cutter, which means that we used all the machines in the Fab Lab! Travel arrangements Our instructors, \u00c1rni Bj\u00f6rnsson and \u00de\u00f3rarinn Bjartur Brei\u00f0fj\u00f6r\u00f0 Gunnarsson came to support us during this intense session of work. \u00de\u00f3rarinn and I flew in from \u00cdsafj\u00f6r\u00f0ur and \u00c1rni flew in from Akureyri. Landing in \u00cdsafj\u00f6r\u00f0ur is more exhilarating than many people would like. 
\u00cdsafj\u00f6r\u00f0ur is one of the most difficult airports in Iceland. When the plane comes in, it needs to take a sharp U-turn at the bottom of the fjord before landing on the tarmac. People are often quite shocked, and I heard that two of the designers of the Dutch Fokker aircraft that used to fly this route came along for the ride once and commented that the aircraft was not designed for this kind of thing. If there's even a little bit of side wind toward the mountain, the flight is cancelled. Fortunately, the weather gods smiled upon us this time. Fab Lab Reykjav\u00edk On arriving in Fab Lab Reykjav\u00edk, I discovered that I was one of the orphaned 3D scans on display in the lobby. No long hair or beard back then. I made this with a Kinect 3D scanner many years ago with the help of Linda and Bas Withagen, soon after Fab Lab Reykjav\u00edk first opened. Then the Fab Lab was in Eddufell, in a small commercial center. I spent a lot of time there right after it opened. Good times. I wanted to take many more pictures inside Fab Lab Reykjav\u00edk, because they have designed and fabricated all sorts of clever solutions that I would like to replicate in Fab Lab \u00cdsafj\u00f6r\u00f0ur. But there was no time! Teamwork We were all super productive the whole time. We worked from nine in the mornings to about ten in the evenings, and we made the machine work just in time to go back home. I'm really happy with my teammates and our instructors. \u00c1rni, Svavar, Hafey, Andri and \u00de\u00f3rarinn. The concept Why is the machine called TeaManator? Because my instructor \u00de\u00f3rarinn thought we should use something distinctive as an end stop: He added the tongue in Blender. Andri made some concept art for our machine using a Generative Pre-trained Transformer: Andri used GPT-enabled Bing to generate the images. Spiral 1 My job was to make the motors work. 
The first spiral was to control a stepper motor using an Arduino Uno and an L298N double H-bridge motor controller. I also added a servo to the Arduino Uno. The ATMega328 chip on the Uno is a bit old and outdated, but the Arduino Uno is still the best documented and supported microcontroller board in the world. So it's a good place to get something basic working. Machine control spiral 1: An L298N stepper driver module on a breadboard with a SparkFun RedBoard, which is essentially the same thing as an Arduino Uno. Spiral 2 The second spiral was to make a custom board with a Xiao RP2040 microcontroller module, a DRV8825 StepStick motor controller, 12V input pins from a lab power supply and GPIO breakout pins for a servo and two buttons. In Machine Week I made my biggest leap forward in electronics design. I also developed my methodical method of breadboarding and testing the hardware and software, one element at a time, before transferring the design over to KiCAD. Machine control spiral 2 on a breadboard: A DRV8825 StepStick stepper driver module. I first connected it to the SparkFun RedBoard but when I had that working I switched to the Xiao RP2040 module. PCB milling The custom board that I made is an extension of spiral 2. The first board had mounting holes that were far too big and the board also came loose during milling. Bad board and good board. I input a radius for the mounting holes when I thought I was defining their diameter. So the holes are huge on the board on the left! And I didn't use enough double-sided tape to secure the PCB blank, so it shifted during the trace milling operation. After a redesign, the second attempt to mill the board went well. The board on the right is the final version of the TeaManator controller board. I forgot the drilling holes for the electrolytic bulk storage capacitor for the stepper driver, so I drilled those holes afterwards. Hand drilling the last two holes in the board. 
I only had through-hole female headers, so I bent their legs in order to be able to surface mount them. Bending the legs for surface mounting. The final TeaManator machine controller. I learned a lesson in considering which side through-hole components need to be on in order to touch the copper traces. Close-up of the machine controller, all connected up. The TeaManator 2000 tea steeping machine. Clean and elegant. The reality behind the facade. The USB hub (with AC adapter) and the big power supply are only there to supply 5V to the Xiao and 12V to the stepper driver, because we didn't have time to learn how to make a power supply. I realized then that I need to think seriously about a neat way to power my final project. Design files Here are the KiCAD, SVG and PNG files for the TeaManator machine control board. I'm also including all the incremental test code. Other design files are at the bottom of our group project page . Download TeaManator KiCAD Download tea_machine.ino (final code) Download machine_week_nema17_l298n_test.ino Download machine_week_nema17_l298n_servo_test.ino Download machine_week_nema17_and_servo.ino Download machine_week_xiao_servo.ino Download machine_week_nema17_drv8825_test.ino Download machine_week_nema17_drv8825_test-svavars_board.ino Download machine_week_nema17_and_servo-svavars_board.ino Download machine_week_button_test.ino Spiral 3 The first Stepper Modular Thing that Andri made. He had to do considerable editing to make the circuit fabricatable. After 3D printing all the parts for the linear motion axis , Andri worked tirelessly on the third spiral, which was to make a Modular Things stepper controller board . We were really excited about this solution, but this design really thin traces and two layers, making it difficult to make. Andri made lots of modifications to the design in Fusion 360 in order to make the traces thicker and reroute everything so that it fit on one layer. 
He successfully milled the board and soldered all the components to it, but it didn't work. Unfortunately, the motor drivers in the original design need a higher voltage than 5V to work. The designer, Quentin Bols\u00e9e was very responsive to our questions. Then our instructors \u00c1rni and \u00de\u00f3rarinn, recalled that Yuichi Tamiya at Fab Lab Kannai made a working version of the Modular Things stepper control board at the 2023 instructor bootcamp in Amsterdam. Andri proceeded to make the board and \u00c1rni soldered the components onto it. It worked on the first try! The second Stepper Modular Thing that Andri made. Wow! It's powered by the USB port! We didn't have time to integrate it into our machine, but I'm very glad that Andri and our instructors were able to make a working Modular Thing. Seeing the Stepper Modular Thing working and being able to make the machine control board successfully combined to give me the confidence to try to make a robot arm joint immediately when I arrived back in \u00cdsafj\u00f6r\u00f0ur. See more info in Output Devices week . Our instructors We are lucky to have these instructors. Here are some images to prove it: Left: \u00de\u00f3rarinn explaining some electronics to Hafey. Right: Andri showing \u00c1rni the teabag holder. Attention: Coffee for Svavar! Our instructors took good care of us during this intense work session. My instructor \u00de\u00f3rarinn also disassembled a stepper motor and showed us how it works: Left: The stepper rotor, with its many iron teeth visible. Right: The step signal that turns the rotor (two of the phases are connected to the oscilloscope). .md-content__button { display: none; }","title":"10. Machine Week"},{"location":"assignments/week10.html#machine-week","text":"The Icelandic Machine Week team: Svavar, Andri and Hafey. Machine Week was fantastic. 
Check out our project page here: MACHINE WEEK PAGE For an intense three-day weekend, Andri S\u00e6mundsson , Hafey Viktor\u00eda Hallgr\u00edmsd\u00f3ttir and myself joined forces in Fab Lab Reykjav\u00edk to build a machine. Here's our presentation video: The build process and function of the TeaManator tea steeping machine. But it's better to watch the video with commentary from Hafey: Hafey presenting the TeaManator tea machine to Neil Gershenfeld and the Fab Academy class 2023. Neil liked how the machine is nicely finished and presented. Link to video. The clean aesthetics of the machine are thanks to Hafey, who designed and made the structure on the Shopbot and the laser cutter. She also made the logo on the vinyl cutter, which means that we used all the machines in the Fab Lab!","title":"Machine Week   "},{"location":"assignments/week10.html#travel-arrangements","text":"Our instructors, \u00c1rni Bj\u00f6rnsson and \u00de\u00f3rarinn Bjartur Brei\u00f0fj\u00f6r\u00f0 Gunnarsson came to support us during this intense session of work. \u00de\u00f3rarinn and I flew in from \u00cdsafj\u00f6r\u00f0ur and \u00c1rni flew in from Akureyri. Landing in \u00cdsafj\u00f6r\u00f0ur is more exhilarating than many people would like. \u00cdsafj\u00f6r\u00f0ur is one of the most difficult airports in Iceland. When the plane comes in, it needs to take a sharp U-turn at the bottom of the fjord before landing on the tarmac. People are often quite shocked, and I heard that two of the designers of the Dutch Fokker aircraft that used to fly this route came along for the ride once and commented that the aircraft was not designed for this kind of thing. If there's even a little bit of side wind toward the mountain, the flight is cancelled. 
Fortunately, the weather gods smiled upon us this time.","title":"Travel arrangements"},{"location":"assignments/week10.html#fab-lab-reykjavik","text":"On arriving in Fab Lab Reykjav\u00edk, I discovered that I was one of the orphaned 3D scans on display in the lobby. No long hair or beard back then. I made this with a Kinect 3D scanner many years ago with the help of Linda and Bas Withagen, soon after Fab Lab Reykjav\u00edk first opened. Then the Fab Lab was in Eddufell, in a small commercial center. I spent a lot of time there right after it opened. Good times. I wanted to take many more pictures inside Fab Lab Reykjav\u00edk, because they have designed and fabricated all sorts of clever solutions that I would like to replicate in Fab Lab \u00cdsafj\u00f6r\u00f0ur. But there was no time!","title":"Fab Lab Reykjav\u00edk"},{"location":"assignments/week10.html#teamwork","text":"We were all super productive the whole time. We worked from nine in the mornings to about ten in the evenings, and we made the machine work just in time to go back home. I'm really happy with my teammates and our instructors. \u00c1rni, Svavar, Hafey, Andri and \u00de\u00f3rarinn.","title":"Teamwork"},{"location":"assignments/week10.html#the-concept","text":"Why is the machine called TeaManator? Because my instructor \u00de\u00f3rarinn thought we should use something distinctive as an end stop: He added the tongue in Blender. Andri made some concept art for our machine using a Generative Pre-trained Transformer: Andri used GPT-enabled Bing to generate the images.","title":"The concept"},{"location":"assignments/week10.html#spiral-1","text":"My job was to make the motors work. The first spiral was to control a stepper motor using an Arduino Uno and an L298N double H-bridge motor controller. I also added a servo to the Arduino Uno. The ATMega328 chip on the Uno is a bit old and outdated, but the Arduino Uno is still the best documented and supported microcontroller board in the world. 
So it's a good place to get something basic working. Machine control spiral 1: An L298N stepper driver module on a breadboard with a SparkFun RedBoard, which is essentially the same thing as an Arduino Uno.","title":"Spiral 1"},{"location":"assignments/week10.html#spiral-2","text":"The second spiral was to make a custom board with a Xiao RP2040 microcontroller module, a DRV8825 StepStick motor controller, 12V input pins from a lab power supply and GPIO breakout pins for a servo and two buttons. In Machine Week I made my biggest leap forward in electronics design. I also developed my methodical method of breadboarding and testing the hardware and software, one element at a time, before transferring the design over to KiCAD. Machine control spiral 2 on a breadboard: A DRV8825 StepStick stepper driver module. I first connected it to the SparkFun RedBoard but when I had that working I switched to the Xiao RP2040 module.","title":"Spiral 2"},{"location":"assignments/week10.html#pcb-milling","text":"The custom board that I made is an extension of spiral 2. The first board had mounting holes that were far too big and the board also came loose during milling. Bad board and good board. I input a radius for the mounting holes when I thought I was defining their diameter. So the holes are huge on the board on the left! And I didn't use enough double-sided tape to secure the PCB blank, so it shifted during the trace milling operation. After a redesign, the second attempt to mill the board went well. The board on the right is the final version of the TeaManator controller board. I forgot the drilling holes for the electrolytic bulk storage capacitor for the stepper driver, so I drilled those holes afterwards. Hand drilling the last two holes in the board. I only had through-hole female headers, so I bent their legs in order to be able to surface mount them. Bending the legs for surface mounting. The final TeaManator machine controller. 
I learned a lesson in considering which side through-hole components need to be on in order to touch the copper traces. Close-up of the machine controller, all connected up. The TeaManator 2000 tea steeping machine. Clean and elegant. The reality behind the facade. The USB hub (with AC adapter) and the big power supply are only there to supply 5V to the Xiao and 12V to the stepper driver, because we didn't have time to learn how to make a power supply. I realized then that I need to think seriously about a neat way to power my final project.","title":"PCB milling"},{"location":"assignments/week10.html#design-files","text":"Here are the KiCAD, SVG and PNG files for the TeaManator machine control board. I'm also including all the incremental test code. Other design files are at the bottom of our group project page . Download TeaManator KiCAD Download tea_machine.ino (final code) Download machine_week_nema17_l298n_test.ino Download machine_week_nema17_l298n_servo_test.ino Download machine_week_nema17_and_servo.ino Download machine_week_xiao_servo.ino Download machine_week_nema17_drv8825_test.ino Download machine_week_nema17_drv8825_test-svavars_board.ino Download machine_week_nema17_and_servo-svavars_board.ino Download machine_week_button_test.ino","title":"Design files"},{"location":"assignments/week10.html#spiral-3","text":"The first Stepper Modular Thing that Andri made. He had to do considerable editing to make the circuit fabricatable. After 3D printing all the parts for the linear motion axis , Andri worked tirelessly on the third spiral, which was to make a Modular Things stepper controller board . We were really excited about this solution, but this design really thin traces and two layers, making it difficult to make. Andri made lots of modifications to the design in Fusion 360 in order to make the traces thicker and reroute everything so that it fit on one layer. He successfully milled the board and soldered all the components to it, but it didn't work. 
Unfortunately, the motor drivers in the original design need a higher voltage than 5V to work. The designer, Quentin Bols\u00e9e was very responsive to our questions. Then our instructors \u00c1rni and \u00de\u00f3rarinn, recalled that Yuichi Tamiya at Fab Lab Kannai made a working version of the Modular Things stepper control board at the 2023 instructor bootcamp in Amsterdam. Andri proceeded to make the board and \u00c1rni soldered the components onto it. It worked on the first try! The second Stepper Modular Thing that Andri made. Wow! It's powered by the USB port! We didn't have time to integrate it into our machine, but I'm very glad that Andri and our instructors were able to make a working Modular Thing. Seeing the Stepper Modular Thing working and being able to make the machine control board successfully combined to give me the confidence to try to make a robot arm joint immediately when I arrived back in \u00cdsafj\u00f6r\u00f0ur. See more info in Output Devices week .","title":"Spiral 3"},{"location":"assignments/week10.html#our-instructors","text":"We are lucky to have these instructors. Here are some images to prove it: Left: \u00de\u00f3rarinn explaining some electronics to Hafey. Right: Andri showing \u00c1rni the teabag holder. Attention: Coffee for Svavar! Our instructors took good care of us during this intense work session. My instructor \u00de\u00f3rarinn also disassembled a stepper motor and showed us how it works: Left: The stepper rotor, with its many iron teeth visible. Right: The step signal that turns the rotor (two of the phases are connected to the oscilloscope). .md-content__button { display: none; }","title":"Our instructors"},{"location":"assignments/week11.html","text":"Input Devices IR sensor board I used the IR phototransistor from the fab library twice, to represent both the IR emitter and the phototransistor. 
For this board I'm using obsolete parts from the 2012 Fab Lab inventory that still work really well; the OP280KT IR emitter and the matched OP580 phototransistor. They are have a more square shape than their modern counterparts, but I checked the datasheets and their footprints are close enough to what I have in the KiCAD library now. My Xiao IR sensor schematic. I looked at the phototransistor board on the Xiao page of the Fab Academy web site of Adrian Torres. It has a 1kOhm resistor on the IR emitter diode and a 10kOhm resistor on the IR sensor. I did the same here. My Xiao PCB design for the IR emitter and sensor. Instead of soldering the Xiao onto the board I'm using pin sockets. The milling went well using Fab Modules and the Roland Modela MDX-20, but the edges of the traces are a little bit rough. That's a sign of wear on the 1/64 inch bit. Milling the traces of the IR sensor board. Then I milled the board outline with the 1/32 inch bit. Milling the outline of the IR sensor board. Here's how the board looks: The IR sensor board. With the Xiao SAMD21 on board. Here's a video of the sensor readings, it works really well: Measuring the analog signal I connected the signal from the IR sensor to the oscilloscope and got a reading that changed with the light hitting the sensor: Design files I used code from Adrian Torres to get readings in the serial plotter, but I changed the number of the analog read pin to A10. Here's my KiCAD project: Download Xiao IR sensor board And the PCB milling files for Fab Modules or Mods: The traces. The interior (for milling the board outline). And the Arduino code: Download IR sensor Arduino code Magnetic encoder The board that I made for output devices has three pins that are intended for encoder input. I connected the encoder and wrote a simple test program in the Arduino IDE. The AS5048 encoder has a three-pin PWM output and a five-pin SPI output. I used the simpler PWM output. 
It was easier to route the board for those connections. In this case, I use the pulseIn Arduino command to read the length of the pulses coming into the pin from the encoder. Then I print the value to the serial monitor. Here's the whole program: const int encoder = 3 ; // AMS AS5048 encoder int pos = 0 ; //mechanical position of shaft void setup () { Serial . begin ( 9600 ); pinMode ( encoder , INPUT ); } void loop () { pos = pulseIn ( encoder , HIGH ); //read encoder pulse Serial . print ( pos ); Serial . print ( '\\n' ); delay ( 10 ); } And here's a video of it running: Then I connected the current sensing resistor of each H-bridge to a pin on the Xiao RP2040. Using analogRead, I was able to measure the current going through the motor! There is one value for each H-bridge, and the values are similar. The values fluctuate, so they depend on when the measurement is made. It seems to work! Measuring the digital signal from the encoder I measured the SPI signals coming out of the AS5048 magnetic encoder. The first thing to measure is the clock signal. This is what synchronizes the SPI communication. The clock signal from the A5048 magnetic angle sensor was weak. I thought that maybe the magnetic encoder chip wasn't supposed to generate a clock signal. In SPI communication, there's only one main IC, and it generates the clock signal. So I connected the encoder up to a SAMD21 chip (and also connected the motor driver). With the microcontroller connected, the clock signal is strong. But its shape is surprising. I thought it would just be a regular square wave with no gaps. Next up is the chip select pin. The chip select pin is used to select between secondary SPI nodes, if more than one are connected to the same main microcontroller. This is how the chip select signal looks: The chip select signal in blue, overlaid onto the clock signal in yellow. Then there's the data, MOSI and MISO. The MOSI signal. I tried rotating the motor while it was on the screen. 
I didn't see any significant changes. But the angle still appeared in the serial monitor. The MISO signal. Some disturbances happened when I turned the motor, but I couldn't see how the signal was coming across the wire. .md-content__button { display: none; }","title":"11. Input Devices"},{"location":"assignments/week11.html#input-devices","text":"","title":"Input Devices   "},{"location":"assignments/week11.html#ir-sensor-board","text":"I used the IR phototransistor from the fab library twice, to represent both the IR emitter and the phototransistor. For this board I'm using obsolete parts from the 2012 Fab Lab inventory that still work really well; the OP280KT IR emitter and the matched OP580 phototransistor. They are have a more square shape than their modern counterparts, but I checked the datasheets and their footprints are close enough to what I have in the KiCAD library now. My Xiao IR sensor schematic. I looked at the phototransistor board on the Xiao page of the Fab Academy web site of Adrian Torres. It has a 1kOhm resistor on the IR emitter diode and a 10kOhm resistor on the IR sensor. I did the same here. My Xiao PCB design for the IR emitter and sensor. Instead of soldering the Xiao onto the board I'm using pin sockets. The milling went well using Fab Modules and the Roland Modela MDX-20, but the edges of the traces are a little bit rough. That's a sign of wear on the 1/64 inch bit. Milling the traces of the IR sensor board. Then I milled the board outline with the 1/32 inch bit. Milling the outline of the IR sensor board. Here's how the board looks: The IR sensor board. With the Xiao SAMD21 on board. 
Here's a video of the sensor readings, it works really well:","title":"IR sensor board"},{"location":"assignments/week11.html#measuring-the-analog-signal","text":"I connected the signal from the IR sensor to the oscilloscope and got a reading that changed with the light hitting the sensor:","title":"Measuring the analog signal"},{"location":"assignments/week11.html#design-files","text":"I used code from Adrian Torres to get readings in the serial plotter, but I changed the number of the analog read pin to A10. Here's my KiCAD project: Download Xiao IR sensor board And the PCB milling files for Fab Modules or Mods: The traces. The interior (for milling the board outline). And the Arduino code: Download IR sensor Arduino code","title":"Design files"},{"location":"assignments/week11.html#magnetic-encoder","text":"The board that I made for output devices has three pins that are intended for encoder input. I connected the encoder and wrote a simple test program in the Arduino IDE. The AS5048 encoder has a three-pin PWM output and a five-pin SPI output. I used the simpler PWM output. It was easier to route the board for those connections. In this case, I use the pulseIn Arduino command to read the length of the pulses coming into the pin from the encoder. Then I print the value to the serial monitor. Here's the whole program: const int encoder = 3 ; // AMS AS5048 encoder int pos = 0 ; //mechanical position of shaft void setup () { Serial . begin ( 9600 ); pinMode ( encoder , INPUT ); } void loop () { pos = pulseIn ( encoder , HIGH ); //read encoder pulse Serial . print ( pos ); Serial . print ( '\\n' ); delay ( 10 ); } And here's a video of it running: Then I connected the current sensing resistor of each H-bridge to a pin on the Xiao RP2040. Using analogRead, I was able to measure the current going through the motor! There is one value for each H-bridge, and the values are similar. The values fluctuate, so they depend on when the measurement is made. 
It seems to work!","title":"Magnetic encoder"},{"location":"assignments/week11.html#measuring-the-digital-signal-from-the-encoder","text":"I measured the SPI signals coming out of the AS5048 magnetic encoder. The first thing to measure is the clock signal. This is what synchronizes the SPI communication. The clock signal from the A5048 magnetic angle sensor was weak. I thought that maybe the magnetic encoder chip wasn't supposed to generate a clock signal. In SPI communication, there's only one main IC, and it generates the clock signal. So I connected the encoder up to a SAMD21 chip (and also connected the motor driver). With the microcontroller connected, the clock signal is strong. But its shape is surprising. I thought it would just be a regular square wave with no gaps. Next up is the chip select pin. The chip select pin is used to select between secondary SPI nodes, if more than one are connected to the same main microcontroller. This is how the chip select signal looks: The chip select signal in blue, overlaid onto the clock signal in yellow. Then there's the data, MOSI and MISO. The MOSI signal. I tried rotating the motor while it was on the screen. I didn't see any significant changes. But the angle still appeared in the serial monitor. The MISO signal. Some disturbances happened when I turned the motor, but I couldn't see how the signal was coming across the wire. .md-content__button { display: none; }","title":"Measuring the digital signal from the encoder"},{"location":"assignments/week12.html","text":"Molding and Casting The Guerilla guide to CNC and resing casting I came across the Guerrilla guide to CNC and resin casting by Michal Zalewski some months ago and was fascinated by the depth of technical information and practical skills displayed in the guide and in his Omnibot Mk II build . Look at that tiny DIY planetary gearbox ! His adventures in CNC and robotics are inspiring. 
And others have used his methods successfully; just look at this hybrid walker robot ( video ).So I took his recommendations and tried to order the main materials that he uses for his resin casting projects: Medium density modeling board, Quantum Silicones Q262 molding silicone and Innovative Polymers IE-3075 polyurethane for casting parts with excellent material properties. The problem was that no one wanted to sell them to me: This seemed like a dead end. Then I remembered a service I had used once before to order something obscure from a US based company that didn't ship to Iceland: ShopUSA . You can have things shipped to their warehouse in Virginia, and they forward your package to any country in the world. They shipped my polyurethane resin by air, which was convenient, but the silicone that I needed to make molds went by sea, because that package was above a weight limit. I also found someone on eBay who was selling relatively small pieces of medium density modeling board for milling. This is the stuff that's in all the ads from Roland, it's dimensionally stable, easy to machine and leaves a nice surface finish, not unlike a metal mold that has been glass bead blasted: I like this stuff. It's like machinable wax, but with a filler, so you get a uniform, slightly textured finish. It takes well to sanding. Mold pattern 1 modeling and toolpath generation I decided to make a button for the Frankenstein MCU that my instructor \u00de\u00f3rarinn is working on with Francisco Sanchez Arroyo . You push the button and your Fab Lab goes online. You can see the Fab Labs appear on the spinning globe , with connections forming between all the labs. It's a simple design, mainly meant to test the 3D spiral toolpath in Fusion 360 and the surface finish that is achievable with the modeling board. First I embossed the text and tried a parallel 3D toolpath in Fusion 360: I couldn't get the detail I wanted with this approach. 
Next, I used a single line font for the text and used the Project command to mill it into the curved surface of the button with a 3D strategy: The single line font, coupled with the Project milling strategy, worked really well . Milling mold pattern 1 I used a simple fixturing method that my instructor \u00de\u00f3rarinn showed me. I used the zeroing plate on the Shopbot. The milling chips look really nice when running adaptive clearing. Roughing pass being completed. Finishing pass looking good. Molding mold pattern 1 Since my silicone hadn't arrived, I got the help of my father-in-law, who is a recently retired dentist. He uses Impregum Penta from 3M to take impressions of teeth. It comes with a handy machine which mixes the two parts in the right proportions and dispenses the silicone. Still knows all the moves. This dental silicone is really viscous, so it's hard to prevent bubbles from being trapped in it when dispensing into the mold. Also, I didn't add draft angles to the geometry, so it was hard to get the silicone out, and I damaged the mold pattern: I thought the mold was shallow enough to not need a draft angle, but it really does need it. Mold pattern 2 Comparison between the profile sketches of the first and second mold pattern. Section analysis of first and second mold pattern. The second mold pattern toolpath. On the left are the feeds and speeds for the text engraving. My father-in-law put some Vaseline on the edge of the second mold pattern. That worked well. With the addition of the draft angles, it was much easier to get the silicone mold out of the modeling board. I used a syringe to put the silicone in this second mold pattern, so that I could get into all the little engraved letters. That strategy worked, but I still got bubbles in the silicone. A bit of Vaseline as mold release. This is the only picture I have of the syringe, because I needed both hands to apply the silicone. The second mold came out well, except for a few bubbles. 
With the help of my father-in-law, I made another mold from this pattern, but it turned out the same. The right silicone arrives Then my silicone arrived from Hobby Silicone! I also got a vacuum pot that my friend P\u00e1ll Einarsson made. That's a great thing to have when molding and casting! But I was disappointed when I plugged it in. I got no vacuum at all. If anything, the needle on the meter went up. I tried pressing down on the lid to seal it, but no luck. Then I felt air blowing out from under the lid. Aha! DIY vacuum pot. When Sigur\u00f0ur, the electrical teacher at the school came to the lab I got him to take a look at the three-phase plug with me. He showed me how to open it and he suggested that I try swapping the brown and black wires. That worked! The vacuum pump now blows in the right direction and I can degas the silicone and polyurethane. I ordered QM-262, which is quite stiff with Shore 60A hardness, and also Soft 107, which only has a 7A Shore hardness. I tried the softer one. One part blue catalyst to ten parts silicone. Mixing the silicone and scraping the sides of the cup until the color is uniform with no streaks. I mixed one part catalyst (5g) with ten parts silicone (50g) as per the manufacturer's instructions, and tried to shear it (Stir only laterally, not up and down, so as not to get bubbles in the mix). When the color was even I put the mix into the vacuum pot and let it sit for five minutes. A lot of bubbles surfaced and popped. I then let air back in and put the vacuum back on twice for two minutes. Then I poured the light blue mixed silicone into the mold pattern (after spraying it with Ultralease from Hobby Silicone). I poured in a thin stream into the same place the whole time, really slowly. Then I put it into the vacuum pot. A surprising number of bubbles came up. After a whole work day, I checked on the remains of the mixed silicone in the cup. It was still flowing freely. That was discouraging. 
My wife A\u00f0albj\u00f6rg worked for OK Prosthetics making prosthetic legs and silicone liners and she said that they heated up the aluminum molds to accelerate the curing process. Any remains took days to cure. They used to put the silicone mixing nozzles into an oven to be able to get the remains out and use the nozzle again quickly. She said that it depends on the chemistry and especially the catalyst, but she said that my silicone might still cure. Degassing the mold after pouring. After a week of waiting and then going to the Fab Lab Bootcamp in Neskaupsta\u00f0ur for another week, I came back and removed the mold from the master pattern. This silicone mold is very flexible. I'm very happy with the results. There are absolutely no bubbles in the mold and the text was captured perfectly. But I may need to contact the silicone manufacturer, because the datasheet says that it should cure in 24 hours. That's a nice-looking mold with no bubbles. Trying to mix polyurethane I made an attempt to mix the IE-3075 polyurethane to cast it in the silicone mold. The bottles said that I should agitate them before mixing, because they may have settled in storage. I probably shook them way too hard, because when I put the 1:1 by volume mixture into the vacuum pot, it foamed and immediately started to set. I made a rigid foam. Those can be useful as core materials in composites, but that's not what I intended to make. Casting wax in the mold Just to cast something in the mold, I tried candle wax. So I took the mold home and heated up an old candle. Heating the candle wax over a water bath. When all the wax had turned liquid and transparent, I poured it carefully into the mold. I may have heated the wax too quickly. As it gets hotter, the wax continues to expand according to its coefficient of thermal expansion. Then I poured it into the mold and it starts to cool down and contract. 
The greater the difference between these two temperatures, the worse the warping and wrinkles get. Thermal contraction during cooling results in wrinkles on the surface. Then I removed the cast wax part from the silicone mold. A small hand immediately grabbed the cast wax part after I removed it from the mold. The resulting part is not nearly as smooth as the mold is. Apparently the thermal contractions were so bad that this side got wrinkled too. The surface is all wrinkled after thermal contraction. But the text came out well. My little helper played around with the mold for a second. This resulted in the mold being bent all the way backwards and developing cracks. He wasn't trying to damage it, he was just being very four years old. This is a lesson for me, the mold was too thin in the middle. I was trying to save material and I thought I could get away with it. \"There is a crack in everything. That's how the light gets in,\" sang Leonard Cohen . Safety Data Sheets The silicones and the polyurethane that I ordered came with Safety Data Sheets. Soft 107 Silicone Soft 107 is a very soft and flexible two-component, room temperature condensation cure silicone. It has a durometer of 6. I guess they named it before they got the results back from the official durometer test. The condensation cure (or tin curing) means that this silicone probably shrinks a bit when curing. But the great thing about this material is that it's food safe (indirect contact). Because it's so soft, it's also ideal to make pads for pad printing. I've wanted to try pad printing for years, now I have what I need to do it. The silicone is mixed with a blue catalyst in a 10:1 silicone to catalyst ratio by weight. It's recommended to mix the silicone in a clean plastic container with a volume 3-4 times the volume of the silicone being mixed. Then there's space for it to expand when you put it in a vacuum pot to draw out the bubbles. 
If you don't have access to a vacuum chamber, they recommend a high pour: You pour into the bottom corner of the mold the whole time, keeping the mixing cup high enough above it to form a thin, steady stream of silicone. The bubbles will be pushed out before entering the thin stream and then the silcone mass slowly levels itself in the mold. This is what I did, and I also used a vacuum pot. The Soft 107 silicone may be harmful in contact with skin. Always use disposable gloves when handling it. Ensure good ventilation while working with it. Make sure that it doesn't leak into the environment. You must make sure that it cures before disposing of any remains. It's good to have a shower, an eyewash station and a ventilation system. It would be good for the Fab Lab to get an eyewash station. Rinse with plenty of water if you get the uncured material on your skin, in your eyes or into your mouth. Quantum Silicones QM 262 Silicone QM 262 is a two-component, room temperature addition cure (platinum cure) silicon material. The platinum cure means that there is essentially no shrinkage while curing. This silicone has excellent mechanical properties and is Michal Zalewski's favorite silicone for precision mold making . It has a high durometer of 60 Shore A. You mix it 10:1 silicone to blue catalyst by weight. The mixing instructions are the same as for the Soft 107 silicone material. The safety instructions are also the same as for the Soft 107 silicone. Innovative Polymers IE-3075 polyurethane This very strong polyurethane is mixed 1:1 by volume from an isocyanate and a polyol: RAKU TOOL IE-3075 Isocyanate Rinse with plenty of water and call a poison center if you get the uncured material on yourself. It may cause respiratory irritation and damage to organs through prolonged or repeated exposure. The material is suspected of causing cancer. Use protective glasses, glove, clothing and in case of inadequate ventilation, wear respiratory protection. 
Heating may cause an explosion. Make sure to completely cure the material before discarding the container. RAKU TOOL IE-3075 Polyol The safety procedures are very similar to the isocyanate, except the polyol is less flammable. There is much less information in its Safety Data Sheet. Ultralease URE GP Urethane Parfilm Mold Release Ultralease URE GP is a solvent-less and virtually odorless mold release. It's heat stable to 315\u00b0C, so it should also be good for injection molding. It doesn't interfere with painting, coating or bonding. It should last for several molding cycles. It's recommended for use with polyurethanes, silicones and rubbers. Wear eye protection, long sleeves, chemical resistant gloves and an organic vapor respirator. Also ensure proper ventilation. The can may explode if heated and the contents may displace oxygen and cause rapid suffocation. Keep away from heat, sparks, flames and hot surfaces. Do not pierce or burn the pressurized container, even after use. And definitely don't spray it onto an open flame. Rinse with plenty of water if you get it on yourself. And because it's in an aerosol can, contact with the rapidly expanding gas may cause burns or frostbite, as the gas removes heat from your skin to fuel its expansion. .md-content__button { display: none; }","title":"12. Molding and Casting"},{"location":"assignments/week12.html#molding-and-casting","text":"","title":"Molding and Casting   "},{"location":"assignments/week12.html#the-guerilla-guide-to-cnc-and-resing-casting","text":"I came across the Guerrilla guide to CNC and resin casting by Michal Zalewski some months ago and was fascinated by the depth of technical information and practical skills displayed in the guide and in his Omnibot Mk II build . Look at that tiny DIY planetary gearbox ! His adventures in CNC and robotics are inspiring. 
And others have used his methods successfully; just look at this hybrid walker robot ( video ).So I took his recommendations and tried to order the main materials that he uses for his resin casting projects: Medium density modeling board, Quantum Silicones Q262 molding silicone and Innovative Polymers IE-3075 polyurethane for casting parts with excellent material properties. The problem was that no one wanted to sell them to me: This seemed like a dead end. Then I remembered a service I had used once before to order something obscure from a US based company that didn't ship to Iceland: ShopUSA . You can have things shipped to their warehouse in Virginia, and they forward your package to any country in the world. They shipped my polyurethane resin by air, which was convenient, but the silicone that I needed to make molds went by sea, because that package was above a weight limit. I also found someone on eBay who was selling relatively small pieces of medium density modeling board for milling. This is the stuff that's in all the ads from Roland, it's dimensionally stable, easy to machine and leaves a nice surface finish, not unlike a metal mold that has been glass bead blasted: I like this stuff. It's like machinable wax, but with a filler, so you get a uniform, slightly textured finish. It takes well to sanding.","title":"The Guerilla guide to CNC and resing casting"},{"location":"assignments/week12.html#mold-pattern-1-modeling-and-toolpath-generation","text":"I decided to make a button for the Frankenstein MCU that my instructor \u00de\u00f3rarinn is working on with Francisco Sanchez Arroyo . You push the button and your Fab Lab goes online. You can see the Fab Labs appear on the spinning globe , with connections forming between all the labs. It's a simple design, mainly meant to test the 3D spiral toolpath in Fusion 360 and the surface finish that is achievable with the modeling board. 
First I embossed the text and tried a parallel 3D toolpath in Fusion 360: I couldn't get the detail I wanted with this approach. Next, I used a single line font for the text and used the Project command to mill it into the curved surface of the button with a 3D strategy: The single line font, coupled with the Project milling strategy, worked really well .","title":"Mold pattern 1 modeling and toolpath generation"},{"location":"assignments/week12.html#milling-mold-pattern-1","text":"I used a simple fixturing method that my instructor \u00de\u00f3rarinn showed me. I used the zeroing plate on the Shopbot. The milling chips look really nice when running adaptive clearing. Roughing pass being completed. Finishing pass looking good.","title":"Milling mold pattern 1"},{"location":"assignments/week12.html#molding-mold-pattern-1","text":"Since my silicone hadn't arrived, I got the help of my father-in-law, who is a recently retired dentist. He uses Impregum Penta from 3M to take impressions of teeth. It comes with a handy machine which mixes the two parts in the right proportions and dispenses the silicone. Still knows all the moves. This dental silicone is really viscous, so it's hard to prevent bubbles from being trapped in it when dispensing into the mold. Also, I didn't add draft angles to the geometry, so it was hard to get the silicone out, and I damaged the mold pattern: I thought the mold was shallow enough to not need a draft angle, but it really does need it.","title":"Molding mold pattern 1"},{"location":"assignments/week12.html#mold-pattern-2","text":"Comparison between the profile sketches of the first and second mold pattern. Section analysis of first and second mold pattern. The second mold pattern toolpath. On the left are the feeds and speeds for the text engraving. My father-in-law put some Vaseline on the edge of the second mold pattern. That worked well. 
With the addition of the draft angles, it was much easier to get the silicone mold out of the modeling board. I used a syringe to put the silicone in this second mold pattern, so that I could get into all the little engraved letters. That strategy worked, but I still got bubbles in the silicone. A bit of Vaseline as mold release. This is the only picture I have of the syringe, because I needed both hands to apply the silicone. The second mold came out well, except for a few bubbles. With the help of my father-in-law, I made another mold from this pattern, but it turned out the same.","title":"Mold pattern 2"},{"location":"assignments/week12.html#the-right-silicone-arrives","text":"Then my silicone arrived from Hobby Silicone! I also got a vacuum pot that my friend P\u00e1ll Einarsson made. That's a great thing to have when molding and casting! But I was disappointed when I plugged it in. I got no vacuum at all. If anything, the needle on the meter went up. I tried pressing down on the lid to seal it, but no luck. Then I felt air blowing out from under the lid. Aha! DIY vacuum pot. When Sigur\u00f0ur, the electrical teacher at the school came to the lab I got him to take a look at the three-phase plug with me. He showed me how to open it and he suggested that I try swapping the brown and black wires. That worked! The vacuum pump now blows in the right direction and I can degas the silicone and polyurethane. I ordered QM-262, which is quite stiff with Shore 60A hardness, and also Soft 107, which only has a 7A Shore hardness. I tried the softer one. One part blue catalyst to ten parts silicone. Mixing the silicone and scraping the sides of the cup until the color is uniform with no streaks. I mixed one part catalyst (5g) with ten parts silicone (50g) as per the manufacturer's instructions, and tried to shear it (Stir only laterally, not up and down, so as not to get bubbles in the mix). 
When the color was even I put the mix into the vacuum pot and let it sit for five minutes. A lot of bubbles surfaced and popped. I then let air back in and put the vacuum back on twice for two minutes. Then I poured the light blue mixed silicone into the mold pattern (after spraying it with Ultralease from Hobby Silicone). I poured in a thin stream into the same place the whole time, really slowly. Then I put it into the vacuum pot. A surprising number of bubbles came up. After a whole work day, I checked on the remains of the mixed silicone in the cup. It was still flowing freely. That was discouraging. My wife A\u00f0albj\u00f6rg worked for OK Prosthetics making prosthetic legs and silicone liners and she said that they heated up the aluminum molds to accelerate the curing process. Any remains took days to cure. They used to put the silicone mixing nozzles into an oven to be able to get the remains out and use the nozzle again quickly. She said that it depends on the chemistry and especially the catalyst, but she said that my silicone might still cure. Degassing the mold after pouring. After a week of waiting and then going to the Fab Lab Bootcamp in Neskaupsta\u00f0ur for another week, I came back and removed the mold from the master pattern. This silicone mold is very flexible. I'm very happy with the results. There are absolutely no bubbles in the mold and the text was captured perfectly. But I may need to contact the silicone manufacturer, because the datasheet says that it should cure in 24 hours. That's a nice-looking mold with no bubbles.","title":"The right silicone arrives"},{"location":"assignments/week12.html#trying-to-mix-polyurethane","text":"I made an attempt to mix the IE-3075 polyurethane to cast it in the silicone mold. The bottles said that I should agitate them before mixing, because they may have settled in storage. 
I probably shook them way too hard, because when I put the 1:1 by volume mixture into the vacuum pot, it foamed and immediately started to set. I made a rigid foam. Those can be useful as core materials in composites, but that's not what I intended to make.","title":"Trying to mix polyurethane"},{"location":"assignments/week12.html#casting-wax-in-the-mold","text":"Just to cast something in the mold, I tried candle wax. So I took the mold home and heated up an old candle. Heating the candle wax over a water bath. When all the wax had turned liquid and transparent, I poured it carefully into the mold. I may have heated the wax too quickly. As it gets hotter, the wax continues to expand according to its coefficient of thermal expansion. Then I poured it into the mold and it starts to cool down and contract. The greater the difference between these two temperatures, the worse the warping and wrinkles get. Thermal contraction during cooling results in wrinkles on the surface. Then I removed the cast wax part from the silicone mold. A small hand immediately grabbed the cast wax part after I removed it from the mold. The resulting part is not nearly as smooth as the mold is. Apparently the thermal contractions were so bad that this side got wrinkled too. The surface is all wrinkled after thermal contraction. But the text came out well. My little helper played around with the mold for a second. This resulted in the mold being bent all the way backwards and developing cracks. He wasn't trying to damage it, he was just being very four years old. This is a lesson for me, the mold was too thin in the middle. I was trying to save material and I thought I could get away with it. \"There is a crack in everything. 
That's how the light gets in,\" sang Leonard Cohen .","title":"Casting wax in the mold"},{"location":"assignments/week12.html#safety-data-sheets","text":"The silicones and the polyurethane that I ordered came with Safety Data Sheets.","title":"Safety Data Sheets"},{"location":"assignments/week12.html#soft-107-silicone","text":"Soft 107 is a very soft and flexible two-component, room temperature condensation cure silicone. It has a durometer of 6. I guess they named it before they got the results back from the official durometer test. The condensation cure (or tin curing) means that this silicone probably shrinks a bit when curing. But the great thing about this material is that it's food safe (indirect contact). Because it's so soft, it's also ideal to make pads for pad printing. I've wanted to try pad printing for years, now I have what I need to do it. The silicone is mixed with a blue catalyst in a 10:1 silicone to catalyst ratio by weight. It's recommended to mix the silicone in a clean plastic container with a volume 3-4 times the volume of the silicone being mixed. Then there's space for it to expand when you put it in a vacuum pot to draw out the bubbles. If you don't have access to a vacuum chamber, they recommend a high pour: You pour into the bottom corner of the mold the whole time, keeping the mixing cup high enough above it to form a thin, steady stream of silicone. The bubbles will be pushed out before entering the thin stream and then the silcone mass slowly levels itself in the mold. This is what I did, and I also used a vacuum pot. The Soft 107 silicone may be harmful in contact with skin. Always use disposable gloves when handling it. Ensure good ventilation while working with it. Make sure that it doesn't leak into the environment. You must make sure that it cures before disposing of any remains. It's good to have a shower, an eyewash station and a ventilation system. It would be good for the Fab Lab to get an eyewash station. 
Rinse with plenty of water if you get the uncured material on your skin, in your eyes or into your mouth.","title":"Soft 107 Silicone"},{"location":"assignments/week12.html#quantum-silicones-qm-262-silicone","text":"QM 262 is a two-component, room temperature addition cure (platinum cure) silicon material. The platinum cure means that there is essentially no shrinkage while curing. This silicone has excellent mechanical properties and is Michal Zalewski's favorite silicone for precision mold making . It has a high durometer of 60 Shore A. You mix it 10:1 silicone to blue catalyst by weight. The mixing instructions are the same as for the Soft 107 silicone material. The safety instructions are also the same as for the Soft 107 silicone.","title":"Quantum Silicones QM 262 Silicone"},{"location":"assignments/week12.html#innovative-polymers-ie-3075-polyurethane","text":"This very strong polyurethane is mixed 1:1 by volume from an isocyanate and a polyol:","title":"Innovative Polymers IE-3075 polyurethane"},{"location":"assignments/week12.html#raku-tool-ie-3075-isocyanate","text":"Rinse with plenty of water and call a poison center if you get the uncured material on yourself. It may cause respiratory irritation and damage to organs through prolonged or repeated exposure. The material is suspected of causing cancer. Use protective glasses, glove, clothing and in case of inadequate ventilation, wear respiratory protection. Heating may cause an explosion. Make sure to completely cure the material before discarding the container.","title":"RAKU TOOL IE-3075 Isocyanate"},{"location":"assignments/week12.html#raku-tool-ie-3075-polyol","text":"The safety procedures are very similar to the isocyanate, except the polyol is less flammable. 
There is much less information in its Safety Data Sheet.","title":"RAKU TOOL IE-3075 Polyol"},{"location":"assignments/week12.html#ultralease-ure-gp-urethane-parfilm-mold-release","text":"Ultralease URE GP is a solvent-less and virtually odorless mold release. It's heat stable to 315\u00b0C, so it should also be good for injection molding. It doesn't interfere with painting, coating or bonding. It should last for several molding cycles. It's recommended for use with polyurethanes, silicones and rubbers. Wear eye protection, long sleeves, chemical resistant gloves and an organic vapor respirator. Also ensure proper ventilation. The can may explode if heated and the contents may displace oxygen and cause rapid suffocation. Keep away from heat, sparks, flames and hot surfaces. Do not pierce or burn the pressurized container, even after use. And definitely don't spray it onto an open flame. Rinse with plenty of water if you get it on yourself. And because it's in an aerosol can, contact with the rapidly expanding gas may cause burns or frostbite, as the gas removes heat from your skin to fuel its expansion. .md-content__button { display: none; }","title":"Ultralease URE GP Urethane Parfilm Mold Release"},{"location":"assignments/week13.html","text":"Networking and Communications Hello I2C I did this week's work in Neskaupsta\u00f0ur, which is as far as you can get from \u00cdsafj\u00f6r\u00f0ur in Iceland. I had to take two flights to get there! The annual Icelandic Fab Lab Bootcamp was held in Neskaupsta\u00f0ur this year. I think everybody got lots out of it and this was the first time that we set up a repo and a web site for an Icelandic bootcamp. We're under the influence of the 2023 Instructor's Bootcamp in Amsterdam. This is what I packed for the trip (plus a few items of clothing and a toothbrush): I brought a large part of our electronics inventory, just in case. I decided to do as Hafey did, and make Adrian's Hello I2C boards. 
The project consists of a master module which sends an I2C message and two nodes that receive the message and turn on an LED . I milled Adrian's boards and populated them with components. Hafey brought ATtiny412 ICs for me from Fab Lab Reykjav\u00edk. I2C node with adapter. For my own board I decided to use a laser diode. I had wanted to try one of those since I saw \u00c1rni Bj\u00f6rnsson's output devices video . My laser diode. My I2C laser diode PCB design. I tried the Gerber output for the first time. I tried FlatCAM, which went well, but the toolpath left thin strips of copper in between the traces. I would need to adjust the settings before milling again. It also didn't mill the text on the board, but I didn't really care about that. My laser diode I2C board after milling. In this milling machine, the FR1 PCB blanks are fastened to the wasteboard with double-sided tape. Here's my I2C laser board populated and connected to the adapter. Hafey pointed out to me that Adrian's Arduino code had an error. The master node was set up to send a message to one node, and then another. But the number of the node was the same in both cases: the command Wire.write(1) was in both places. After changing the second one to Wire.write(2) , the code worked. My board didn't work. I was able to program the IC, but the LED didn't turn on. Download I2C laser KiCAD project Download I2C master code Download I2C node 1 code Download I2C node2 code Fab Modules When discussing Fab Modules/Mods/Mods Project, few of our colleagues thought it was a little bit strange to turn vector drawings into bitmaps before running them through the CAM software. But Frosti told me that Neil had mentioned at some point that CNC machines operate on a bitmap grid in the end. That's a good point. We're using Computer Numerical Control, not Computer Vector Control. The Gcodes are all encoded as Cartesian coordinates with finite precision. 
So if you use a high enough resolution in the PNG image, you shouldn't lose any accuracy. The workflow works well, and I like using the old Fab Modules that we have running locally on a Linux laptop. Late in the evening I wanted to make the UPDI adapter that I needed to connect my Hello I2C boards to the computer. Frosti took the opportunity to open up Mods Project . After tweaking a few settings and figuring out that we needed to turn on the Save module and then click Calculate again to get an RML export, the milling went great! We just needed to set the file type to RML1 in the Roland control software, instead of RML NC code. microSD breakout board This video might be useful. It shows how to solder wires directly to an SD card to communicate with it. So the passive components on commercial SD breakout modules aren't strictly necessary, although I'm sure they make the communications more reliable. The PCB layout of my microSD card breakout board on the left and Janet Liu's schematic on the right. Her schematic shows which SD card pin matches which SPI pin on this particular SD card holder from Amphenol . You need to open the image in a new tab to see the pin names on my PCB layout. I didn't consider which way you put the SD card into the slot, so now that I've soldered the headers onto the board, the microSD card is stuck there for all eternity. I hope I can at least communicate with it. Download SD card breakout board KiCAD project .md-content__button { display: none; }","title":"13. Networking and Communications"},{"location":"assignments/week13.html#networking-and-communications","text":"","title":"Networking and Communications   "},{"location":"assignments/week13.html#hello-i2c","text":"I did this week's work in Neskaupsta\u00f0ur, which is as far as you can get from \u00cdsafj\u00f6r\u00f0ur in Iceland. I had to take two flights to get there! The annual Icelandic Fab Lab Bootcamp was held in Neskaupsta\u00f0ur this year. 
I think everybody got lots out of it and this was the first time that we set up a repo and a web site for an Icelandic bootcamp. We're under the influence of the 2023 Instructor's Bootcamp in Amsterdam. This is what I packed for the trip (plus a few items of clothing and a toothbrush): I brought a large part of our electronics inventory, just in case. I decided to do as Hafey did, and make Adrian's Hello I2C boards. The project consists of a master module which sends an I2C message and two nodes that receive the message and turn on an LED . I milled Adrian's boards and populated them with components. Hafey brought ATtiny412 ICs for me from Fab Lab Reykjav\u00edk. I2C node with adapter. For my own board I decided to use a laser diode. I had wanted to try one of those since I saw \u00c1rni Bj\u00f6rnsson's output devices video . My laser diode. My I2C laser diode PCB design. I tried the Gerber output for the first time. I tried FlatCAM, which went well, but the toolpath left thin strips of copper in between the traces. I would need to adjust the settings before milling again. It also didn't mill the text on the board, but I didn't really care about that. My laser diode I2C board after milling. In this milling machine, the FR1 PCB blanks are fastened to the wasteboard with double-sided tape. Here's my I2C laser board populated and connected to the adapter. Hafey pointed out to me that Adrian's Arduino code had an error. The master node was set up to send a message to one node, and then another. But the number of the node was the same in both cases: the command Wire.write(1) was in both places. After changing the second one to Wire.write(2) , the code worked. My board didn't work. I was able to program the IC, but the LED didn't turn on. 
Download I2C laser KiCAD project Download I2C master code Download I2C node 1 code Download I2C node2 code","title":"Hello I2C"},{"location":"assignments/week13.html#fab-modules","text":"When discussing Fab Modules/Mods/Mods Project, few of our colleagues thought it was a little bit strange to turn vector drawings into bitmaps before running them through the CAM software. But Frosti told me that Neil had mentioned at some point that CNC machines operate on a bitmap grid in the end. That's a good point. We're using Computer Numerical Control, not Computer Vector Control. The Gcodes are all encoded as Cartesian coordinates with finite precision. So if you use a high enough resolution in the PNG image, you shouldn't lose any accuracy. The workflow works well, and I like using the old Fab Modules that we have running locally on a Linux laptop. Late in the evening I wanted to make the UPDI adapter that I needed to connect my Hello I2C boards to the computer. Frosti took the opportunity to open up Mods Project . After tweaking a few settings and figuring out that we needed to turn on the Save module and then click Calculate again to get an RML export, the milling went great! We just needed to set the file type to RML1 in the Roland control software, instead of RML NC code.","title":"Fab Modules"},{"location":"assignments/week13.html#microsd-breakout-board","text":"This video might be useful. It shows how to solder wires directly to an SD card to communicate with it. So the passive components on commercial SD breakout modules aren't strictly necessary, although I'm sure they make the communications more reliable. The PCB layout of my microSD card breakout board on the left and Janet Liu's schematic on the right. Her schematic shows which SD card pin matches which SPI pin on this particular SD card holder from Amphenol . You need to open the image in a new tab to see the pin names on my PCB layout. 
I didn't consider which way you put the SD card into the slot, so now that I've soldered the headers onto the board, the microSD card is stuck there for all eternity. I hope I can at least communicate with it. Download SD card breakout board KiCAD project .md-content__button { display: none; }","title":"microSD breakout board"},{"location":"assignments/week14.html","text":"Interface and Application Programming Frankenstein MCU Presenting my addition to the FMCU to Neil Gershenfeld and Fab Academy Class 2023. Link to the video. Since my instructor \u00de\u00f3rarinn is working with Fran Sanchez on an IoT button that connects Fab Labs together, I decided to clone the repo and take a look at it. The current MCU uses Zoom to connect the Fab Labs together with live video feeds. So I searched for a way to embed Zoom into your own application and found the Zoom Meeting SDK for web , and in particular this JavaScript example . I cloned the repo, got a secret key as a Zoom Developer and I've got the authentication server running locally on Node.js, but the readme says that I need to send a POST request to the server. HTTP POST request trouble on the Zoom authentication server. Google didn't give me any understandable instructions on how to make a POST request to a localhost Node.js server. So I turned to ChatGPT. This is the first time I ask it to help me solve a problem. And help it did! ChatGPT to the rescue! Making an HTTP POST request using Postman. At the bottom you can see the signature that the Zoom authentication server returns. Thanks ChatGPT! The example works on its own. For this example to work you need to get a special authentication repo, which runs a node js server that listens. You go to Zoom Marketplace and get developer credentials and put them into the code in this repo and run the node server. 
Then when you press Join Meeting on the FMCU website, the website sends a message to the authentication server and gets a passkey, and then logs you into Zoom automatically. Now it's in a sidebar! I added the Zoom Meeting SDK JavaScript code, but the button doesn't work. I took to Mattermost and showed the non-working \"Join Meeting\" button to \u00de\u00f3rarinn and Fran. Fran replied with this: So I looked at the two CSS files in the FMCU repo and saw two instances of a hyperlink having the .repo CSS class. So I gave the Zoom code the .repo CSS class and ran the Node server: It works! Wow, I didn't really expect that. I can even move the window around. Now I need to connect this to a physical circuit. I'm looking into how the FMCU button uses MQTT to send a message to the Node server. I'm also looking for a way to do serial communication using Node. Here's the code with instructions on how to run it locally: Link to the FMCU-Zoom repo Visualizing a light sensor I experimented with creating a computer interface for the light sensor that I made in Input Devices week . The code that runs on the Xiao is very simple. It comes from Adri\u00e1n Torres. The code uses analogRead to read the value from the IR sensor and then writes it to the serial port: Download IR sensor Arduino code I used a few different Python scripts to receive the IR light values from the serial port. First I tried Neil's hello.light.45.py code but it reads single characters from the serial port and I had programmed the Xiao to send whole lines at a time. I stopped there with Neil's code. Then I found a nice tutorial showing how you can list the available COM devices in the terminal, pick one by typing its number, and open it using serial.tools. Once the serial port is open, I run an infinite while loop and read one line from the serial port at a time, decode it using UTF-8 character encoding and then turn that string into an integer. 
I do the reading and converting in only two lines of Python code: packet = serialInst . readline () y = int ( packet . decode ( 'utf' )) Then, to get a very rudimentary graphical representation going, I use an if statement and display one - if the value is between 0 and 100, display -- if the value is between 100 and 200 and so on, up to 1000 ( ---------- ). As simple as it gets. This barely counts as a graphical user interface. Download Python terminal visualization code I also tried to make a GUI using Tkinter. I found a useful code snippet in example 1 in this tutorial , which creates a small GUI window and displays a title and a red and green rectangle with empty space between them. It's static, but by using my y variable (the number that is streaming into the serial port) instead of hardcoded numbers, I can make the bar move. The static GUI example. I could get the Tkinter interface to run separately and I could also get a stream of data from the IR sensor separately, but I had trouble combining them. Apparently, the reason is that I have two infinite while loops and the one that comes first in the code blocks the other. While the code waits for input from the serial port, nothing else can happen. And while the interface is running, nothing else can happen. I couldn't figure this out using the examples that I found online. The following day I gave up and asked ChatGPT to change the code to make the two loops run concurrently. That resulted in code that ran, but I needed to make some changes to it. Only the grey bar was changing size between 0 and 1000 pixels, so I put 1000-y as the width of the black bar. That worked nicely. The interface was also sluggish, but I fixed that by changing root.after(100, readFromSerial) to root.after(10, readFromSerial) . Then there is a much shorter delay for updating the interface. We have a GUI that runs smoothly. 
Download Python GUI code Some random thoughts about my robot arm final project I've looked into many ways of making an interface for a robot arm. Highlights include: Python Tkinter ROS Urumbu Modular Things Processing P5.js Python script in Blender Python script in Fusion 360 Python script in FreeCAD FreeCAD robot workbench RoboDK Grasshopper WebSerial PyScript Webassembly ThreeJS OpenCV NodeJS Svelte Threlte The Pimoroni Pi Pico library Phew! Streamlit NiceGUI WebSockets WebTransport CodeMirror Chilipepr Crossbar.io libreconnect I like this minimal Pi Pico web server that was shared on Doctor Monk's DIY Electronics Blog. This captive portal guide also looks interesting. It uses the Pimoroni Phew! web server library, which is super simple and was written specifically for the Raspberry Pi Pico. I know that ROS is probably the way to go, but it's huge and I don't know where to start. Maybe here , since he asks at the start of the video if you want to build a robot that works with ROS but don't know where to start. .md-content__button { display: none; }","title":"14. Interface and Application Programming"},{"location":"assignments/week14.html#interface-and-application-programming","text":"","title":"Interface and Application Programming   "},{"location":"assignments/week14.html#frankenstein-mcu","text":"Presenting my addition to the FMCU to Neil Gershenfeld and Fab Academy Class 2023. Link to the video. Since my instructor \u00de\u00f3rarinn is working with Fran Sanchez on an IoT button that connects Fab Labs together, I decided to clone the repo and take a look at it. The current MCU uses Zoom to connect the Fab Labs together with live video feeds. So I searched for a way to embed Zoom into your own application and found the Zoom Meeting SDK for web , and in particular this JavaScript example . 
I cloned the repo, got a secret key as a Zoom Developer and I've got the authentication server running locally on Node.js, but the readme says that I need to send a POST request to the server. HTTP POST request trouble on the Zoom authentication server. Google didn't give me any understandable instructions on how to make a POST request to a localhost Node.js server. So I turned to ChatGPT. This is the first time I ask it to help me solve a problem. And help it did! ChatGPT to the rescue! Making an HTTP POST request using Postman. At the bottom you can see the signature that the Zoom authentication server returns. Thanks ChatGPT! The example works on its own. For this example to work you need to get a special authentication repo, which runs a node js server that listens. You go to Zoom Marketplace and get developer credentials and put them into the code in this repo and run the node server. Then when you press Join Meeting on the FMCU website, the website sends a message to the authentication server and gets a passkey, and then logs you into Zoom automatically. Now it's in a sidebar! I added the Zoom Meeting SDK JavaScript code, but the button doesn't work. I took to Mattermost and showed the non-working \"Join Meeting\" button to \u00de\u00f3rarinn and Fran. Fran replied with this: So I looked at the two CSS files in the FMCU repo and saw two instances of a hyperlink having the .repo CSS class. So I gave the Zoom code the .repo CSS class and ran the Node server: It works! Wow, I didn't really expect that. I can even move the window around. Now I need to connect this to a physical circuit. I'm looking into how the FMCU button uses MQTT to send a message to the Node server. I'm also looking for a way to do serial communication using Node. 
Here's the code with instructions on how to run it locally: Link to the FMCU-Zoom repo","title":"Frankenstein MCU"},{"location":"assignments/week14.html#visualizing-a-light-sensor","text":"I experimented with creating a computer interface for the light sensor that I made in Input Devices week . The code that runs on the Xiao is very simple. It comes from Adri\u00e1n Torres. The code uses analogRead to read the value from the IR sensor and then writes it to the serial port: Download IR sensor Arduino code I used a few different Python scripts to receive the IR light values from the serial port. First I tried Neil's hello.light.45.py code but it reads single characters from the serial port and I had programmed the Xiao to send whole lines at a time. I stopped there with Neil's code. Then I found a nice tutorial showing how you can list the available COM devices in the terminal, pick one by typing its number, and open it using serial.tools. Once the serial port is open, I run an infinite while loop and read one line from the serial port at a time, decode it using UTF-8 character encoding and then turn that string into an integer. I do the reading and converting in only two lines of Python code: packet = serialInst . readline () y = int ( packet . decode ( 'utf' )) Then, to get a very rudimentary graphical representation going, I use an if statement and display one - if the value is between 0 and 100, display -- if the value is between 100 and 200 and so on, up to 1000 ( ---------- ). As simple as it gets. This barely counts as a graphical user interface. Download Python terminal visualization code I also tried to make a GUI using Tkinter. I found a useful code snippet in example 1 in this tutorial , which creates a small GUI window and displays a title and a red and green rectangle with empty space between them. It's static, but by using my y variable (the number that is streaming into the serial port) instead of hardcoded numbers, I can make the bar move. 
The static GUI example. I could get the Tkinter interface to run separately and I could also get a stream of data from the IR sensor separately, but I had trouble combining them. Apparently, the reason is that I have two infinite while loops and the one that comes first in the code blocks the other. While the code waits for input from the serial port, nothing else can happen. And while the interface is running, nothing else can happen. I couldn't figure this out using the examples that I found online. The following day I gave up and asked ChatGPT to change the code to make the two loops run concurrently. That resulted in code that ran, but I needed to make some changes to it. Only the grey bar was changing size between 0 and 1000 pixels, so I put 1000-y as the width of the black bar. That worked nicely. The interface was also sluggish, but I fixed that by changing root.after(100, readFromSerial) to root.after(10, readFromSerial) . Then there is a much shorter delay for updating the interface. We have a GUI that runs smoothly. Download Python GUI code","title":"Visualizing a light sensor"},{"location":"assignments/week14.html#some-random-thoughts-about-my-robot-arm-final-project","text":"I've looked into many ways of making an interface for a robot arm. Highlights include: Python Tkinter ROS Urumbu Modular Things Processing P5.js Python script in Blender Python script in Fusion 360 Python script in FreeCAD FreeCAD robot workbench RoboDK Grasshopper WebSerial PyScript Webassembly ThreeJS OpenCV NodeJS Svelte Threlte The Pimoroni Pi Pico library Phew! Streamlit NiceGUI WebSockets WebTransport CodeMirror Chilipepr Crossbar.io libreconnect I like this minimal Pi Pico web server that was shared on Doctor Monk's DIY Electronics Blog. This captive portal guide also looks interesting. It uses the Pimoroni Phew! web server library, which is super simple and was written specifically for the Raspberry Pi Pico. 
I know that ROS is probably the way to go, but it's huge and I don't know where to start. Maybe here , since he asks at the start of the video if you want to build a robot that works with ROS but don't know where to start. .md-content__button { display: none; }","title":"Some random thoughts about my robot arm final project"},{"location":"assignments/week15.html","text":"Wild Card Week Vacuum forming Design The annual Icelandic Fab Lab Bootcamp was held in Fab Lab Neskaupsta\u00f0ur in 2023. M\u00f3ses, the Fab Lab manager, showed me how to do vacuum forming with the Mayku FormBox. The Mayku Formbox is a basic 200x200mm vacuum forming machine. Its heating element goes up to 340\u00b0C You need to connect a vacuum cleaner to it for it to work. It works fine, but I think I would find it annoying to have to bring the vacuum cleaner every time I used the vacuum former. First I brought a rectangular 3D print with #FABLABISA inscribed on it. M\u00f3ses said that the object was rather small, the edges needed to be rounded and he also showed me examples of details similar to my text not coming through in the plastic sheet. Vacuum forming examples. Sharp corners and small details don't work well. The text J\u00d3L (Christmas) in the upper right corner doesn't come through at all. He also mentioned that circular things tend to work the best. That makes sense, the stretching will be uniform and there are no corners where creases can form in the plastic sheet. I decided to go with the Fab Lab logo instead. I rounded the chocolate button shape and the logo as much as the geometry would allow, and showed M\u00f3ses a finished print. I used a Mayku Clear Sheet , which is made of PETG and is food safe. The official Mayku sheets are not cheap. Now that's a better candidate for vacuum forming. This should work nicely, said M\u00f3ses, but there is a question whether the pocket in the middle will be rendered in the plastic sheet. 
I decided to try making holes in the middle to let the air out. I printed four versions of buttons with holes in the middle, for a total of five buttons: No holes 1.5 mm holes 2 mm holes 2 mm holes with small holes in the three smallest crevices 2 mm holes with rectangular holes in the three smallest crevices. Buttons with holes added to draw vacuum through the center of the part. 3D printing Completed 3D prints. Vacuum forming When the prints were complete, we connected a vacuum cleaner to the FormBox and heated it up. M\u00f3ses told me that the sheet needed to be heated up until it droops 2 or 3 cm down. Then it's soft enough to form. Sheet is hot and drooping and ready to form. Then the handle locks are released and the frame with the sheet is quickly lowered onto the bed containing the 3D printed positive forms. The vacuum is held for a minute or two and the sheet is allowed to cool down a bit. Then it's ready! Done! To my surprise, all the forms came out well. Apparently the tiny crevices in the logo were enough to get the air out of the middle part. The button with no holes actually came out the best! Better label both the buttons and the sheet to remember which is which. I tried measuring the depth of the pocket in the middle of each button with a caliper and they seemed to be the same, whether there were holes in the forms or not. Measuring the depth in the middle. I like vacuum forming but its applications are limited. It would be nice to host a short class on chocolate mold making here at the Fab Lab, but I don't know what else I would use the vacuum former for. Mostly it's used to make packaging, and I don't make any packaging. Chocolate casting I followed instructions from the Icelandic newspaper Morgunbla\u00f0i\u00f0, so they may not be useful to you. First melt the chocolate slowly in a water bath. Don't let any water get into the chocolate! You heat it up to 50\u00b0C. Melting the chocolate. 
Then you let the temperature drop to 42\u00b0C and you add a third of the same kind of chocolate (chopped) to the pot. Stir it into the melt. Chopping the rest of the chocolate. Putting the chopped chocolate into the pot. Then you let the temperature drop down to 32\u00b0C. Now the chocolate is tempered and you can pour it into the molds. Pouring into the molds. I also shook the molds to flatten out the bottoms of the buttons. And here they are, lined up with the 3D prints that created them. Row of chocolate Fab Lab buttons. They all came out well. I would skip the holes in the middle next time. Now it's time for testing. Is the chocolate tempered or not? I suspect that my next attempt may be more successful, since I won't have to take pictures and videos while I try to get the temperature profile right. Here's the Fusion model of the Fab Lab chocolate button: Download chocolate button CAD model Further experimentation It would be interesting to try printing the original Fab Lab button model with an increased distance between the 3D printed lines. I think that would make it possible to draw vacuum through the part without having to draw holes manually. And the top surface texture might be nicer, too. FPGA (Field-Programmable Gate Array) The Runber FPGA board. I think the program is supposed to blink the eight LEDs above the FPGA chip, but the tutorial isn't entirely clear about it. Why I've been curious about FPGAs for some time, for three reasons: I took the course Computer-Controlled Hardware in engineering school at the University of Iceland. One of the lecturers is an FPGA designer at the prosthetic company \u00d6ssur . He showed how the company's Power Knee has developed over the years from a large, complicated circuit board with lots of components to a much smaller footprint, essentially containing only a microcontroller and an FPGA. The FPGA can contain any digital circuitry, which is really interesting. 
My wife gave me the book Einstein's Shadow for Christmas a few years ago. It's an entertainingly written account of the quest to photograph a black hole. It involved extremely high-speed FPGA chips which made the Event Horizon Telescope possible. In the 2023 Fab Academy Student Bootcamp , Krisjanis Rijnieks mentioned that ordinary people now have the opportunity to design digital chips and have them made, using open source toolchains like Google Silicon . Google also offers the Open MPW program which is a lottery where you can submit your chip design and if you're lucky, Google will pay for the chip fabrication. I also discovered the Zero to ASIC course from Matt Venn, where he teaches people to design Application Specific Integrated Circuits from scratch and has them manufactured for them. Exciting stuff! Chips are designed using VHDL or Verilog, which are hardware description languages. You also use these languages to program FPGAs. Before committing a chip design to manufacturing, people usually test it on an FPGA. In Wild Card Week, I thought to myself: If I don't try an FPGA now, I probably never will. For this week, I ordered the cheapest FPGA board that I could find ($30) that still has a set of tutorials available, the Gowin RUNBER board from Seeed Studio. How An FPGA is a circuit that you design with a hardware description language. The unit of computation is one bit, which can be a 1 or a 0. The FPGA consists of lots of Look-Up Tables (LUTs), which specify what happens when ones or zeros enter them. These LUTs form logic gates which can be combined to make any circuit, even a whole ARM microcontroller! Indeed, many FPGAs are programmed to contain a microcontroller core as well as additional digital circuits. The Gowin GW1N-4 on my board has 4608 LUTs with four inputs each. I managed to get a license by sending an email to Gowin customer service and then I followed the first tutorial in this PDF . The circuit blinks eight LEDs, one at a time. 
In Gowin FPGA Designer, I created a new project: I selected FPGA Design Project: And then I selected my FPGA chip: I got a summary of the project settings before finishing the setup: Now I had a new FPGA design project. So far so good: I created a new Verilog file and pasted the code from the tutorial. Then I clicked Synthesize: That seemed to work OK. Then I went into Tools -> Floor Planner to see how the circuit looked inside the FPGA. I got an error. Then I looked a little further in the tutorial and found that I needed to create a constraints file, where I specify which pins I'm using and what they are. I went into File -> New, selected Physical Constraints File and gave it the name constr.cst: I just copied the code from the tutorial and pasted it into the constraints file. This is how it looks in the editor: The constraints file defines eight output pins that are supposed to turn eight LEDs on, one at a time. Now I could go into Tools -> Floor Planner. I got a graphical representation of the chip. I would expect eight pins to be highlighted in blue, but only six pins are highlighted. But I wouldn't know how to fix the code, and the messages in the terminal seemed positive: So I soldiered on. I selected the I/O Constraints at the bottom of the window and got a list of the constraints that were defined in the .cst file: The third and final file I needed to create before programming the FPGA was a timing constraints file. In FPGAs, everything is counted in clock cycles, and you need to define the clock frequency. I opened the Timing Constraints Editor: There I created a Timing Constraints file with a period of 83.333 nanoseconds and a frequency of 12 MHz: The timing constraints file only contains a single line of code specifying the clock speed: Then I selected Run Place and Route. That's the button with four colored squares that looks a bit like the Windows icon. 
I got a bunch of Generate file completed messages in the terminal, which seemed promising: If you look again at the image above, you can see the mouse hovering over the Programmer icon, which is a green downward arrow. I now pressed it and hoped for the best. I have no idea what I am doing. The programmer found a USB device, so I clicked Program/Configure. Here the tutorial ends, but I still needed to make some selections. I found the manual for the Programmer in the Gowin software directory and followed instructions that told me to select Embedded flash mode and select the .fs programming file in the dialog: When I clicked Save I got an error: This seems like a simple error, all I have to do is select the right chip. I tried selecting all the chips that have similar names to mine (GWIN-4, GWIN4B, GWIN-4D and all their versions). I had to select the chip both in the Floor Planner and in the Programmer. But nothing worked. I don't know where to go from here, so I'll stop. .md-content__button { display: none; }","title":"15. Wild Card Week"},{"location":"assignments/week15.html#wild-card-week","text":"","title":"Wild Card Week   "},{"location":"assignments/week15.html#vacuum-forming","text":"","title":"Vacuum forming"},{"location":"assignments/week15.html#design","text":"The annual Icelandic Fab Lab Bootcamp was held in Fab Lab Neskaupsta\u00f0ur in 2023. M\u00f3ses, the Fab Lab manager, showed me how to do vacuum forming with the Mayku FormBox. The Mayku Formbox is a basic 200x200mm vacuum forming machine. Its heating element goes up to 340\u00b0C You need to connect a vacuum cleaner to it for it to work. It works fine, but I think I would find it annoying to have to bring the vacuum cleaner every time I used the vacuum former. First I brought a rectangular 3D print with #FABLABISA inscribed on it. 
M\u00f3ses said that the object was rather small, the edges needed to be rounded and he also showed me examples of details similar to my text not coming through in the plastic sheet. Vacuum forming examples. Sharp corners and small details don't work well. The text J\u00d3L (Christmas) in the upper right corner doesn't come through at all. He also mentioned that circular things tend to work the best. That makes sense, the stretching will be uniform and there are no corners where creases can form in the plastic sheet. I decided to go with the Fab Lab logo instead. I rounded the chocolate button shape and the logo as much as the geometry would allow, and showed M\u00f3ses a finished print. I used a Mayku Clear Sheet , which is made of PETG and is food safe. The official Mayku sheets are not cheap. Now that's a better candidate for vacuum forming. This should work nicely, said M\u00f3ses, but there is a question whether the pocket in the middle will be rendered in the plastic sheet. I decided to try making holes in the middle to let the air out. I printed four versions of buttons with holes in the middle, for a total of five buttons: No holes 1.5 mm holes 2 mm holes 2 mm holes with small holes in the three smallest crevices 2 mm holes with rectangular holes in the three smallest crevices. Buttons with holes added to draw vacuum through the center of the part.","title":"Design"},{"location":"assignments/week15.html#3d-printing","text":"Completed 3D prints.","title":"3D printing"},{"location":"assignments/week15.html#vacuum-forming_1","text":"When the prints were complete, we connected a vacuum cleaner to the FormBox and heated it up. M\u00f3ses told me that the sheet needed to be heated up until it droops 2 or 3 cm down. Then it's soft enough to form. Sheet is hot and drooping and ready to form. Then the handle locks are released and the frame with the sheet is quickly lowered onto the bed containing the 3D printed positive forms. 
The vacuum is held for a minute or two and the sheet is allowed to cool down a bit. Then it's ready! Done! To my surprise, all the forms came out well. Apparently the tiny crevices in the logo were enough to get the air out of the middle part. The button with no holes actually came out the best! Better label both the buttons and the sheet to remember which is which. I tried measuring the depth of the pocket in the middle of each button with a caliper and they seemed to be the same, whether there were holes in the forms or not. Measuring the depth in the middle. I like vacuum forming but its applications are limited. It would be nice to host a short class on chocolate mold making here at the Fab Lab, but I don't know what else I would use the vacuum former for. Mostly it's used to make packaging, and I don't make any packaging.","title":"Vacuum forming"},{"location":"assignments/week15.html#chocolate-casting","text":"I followed instructions from the Icelandic newspaper Morgunbla\u00f0i\u00f0, so they may not be useful to you. First melt the chocolate slowly in a water bath. Don't let any water get into the chocolate! You heat it up to 50\u00b0C. Melting the chocolate. Then you let the temperature drop to 42\u00b0C and you add a third of the same kind of chocolate (chopped) to the pot. Stir it into the melt. Chopping the rest of the chocolate. Putting the chopped chocolate into the pot. Then you let the temperature drop down to 32\u00b0C. Now the chocolate is tempered and you can pour it into the molds. Pouring into the molds. I also shook the molds to flatten out the bottoms of the buttons. And here they are, lined up with the 3D prints that created them. Row of chocolate Fab Lab buttons. They all came out well. I would skip the holes in the middle next time. Now it's time for testing. Is the chocolate tempered or not? 
I suspect that my next attempt may be more successful, since I won't have to take pictures and videos while I try to get the temperature profile right. Here's the Fusion model of the Fab Lab chocolate button: Download chocolate button CAD model","title":"Chocolate casting"},{"location":"assignments/week15.html#further-experimentation","text":"It would be interesting to try printing the original Fab Lab button model with an increased distance between the 3D printed lines. I think that would make it possible to draw vacuum through the part without having to draw holes manually. And the top surface texture might be nicer, too.","title":"Further experimentation"},{"location":"assignments/week15.html#fpga-field-programmable-gate-array","text":"The Runber FPGA board. I think the program is supposed to blink the eight LEDs above the FPGA chip, but the tutorial isn't entirely clear about it.","title":"FPGA (Field-Programmable Gate Array)"},{"location":"assignments/week15.html#why","text":"I've been curious about FPGAs for some time, for three reasons: I took the course Computer-Controlled Hardware in engineering school at the University of Iceland. One of the lecturers is an FPGA designer at the prosthetic company \u00d6ssur . He showed how the company's Power Knee has developed over the years from a large, complicated circuit board with lots of components to a much smaller footprint, essentially containing only a microcontroller and an FPGA. The FPGA can contain any digital circuitry, which is really interesting. My wife gave me the book Einstein's Shadow for Christmas a few years ago. It's an entertainingly written account of the quest to photograph a black hole. It involved extremely high-speed FPGA chips which made the Event Horizon Telescope possible. In the 2023 Fab Academy Student Bootcamp , Krisjanis Rijnieks mentioned that ordinary people now have the opportunity to design digital chips and have them made, using open source toolchains like Google Silicon . 
Google also offers the Open MPW program which is a lottery where you can submit your chip design and if you're lucky, Google will pay for the chip fabrication. I also discovered the Zero to ASIC course from Matt Venn, where he teaches people to design Application Specific Integrated Circuits from scratch and has them manufactured for them. Exciting stuff! Chips are designed using VHDL or Verilog, which are hardware description languages. You also use these languages to program FPGAs. Before committing a chip design to manufacturing, people usually test it on an FPGA. In Wild Card Week, I thought to myself: If I don't try an FPGA now, I probably never will. For this week, I ordered the cheapest FPGA board that I could find ($30) that still has a set of tutorials available, the Gowin RUNBER board from Seeed Studio.","title":"Why"},{"location":"assignments/week15.html#how","text":"An FPGA is a circuit that you design with a hardware description language. The unit of computation is one bit, which can be a 1 or a 0. The FPGA consists of lots of Look-Up Tables (LUTs), which specify what happens when ones or zeros enter them. These LUTs form logic gates which can be combined to make any circuit, even a whole ARM microcontroller! Indeed, many FPGAs are programmed to contain a microcontroller core as well as additional digital circuits. The Gowin GW1N-4 on my board has 4608 LUTs with four inputs each. I managed to get a license by sending an email to Gowin customer service and then I followed the first tutorial in this PDF . The circuit blinks eight LEDs, one at a time. In Gowin FPGA Designer, I created a new project: I selected FPGA Design Project: And then I selected my FPGA chip: I got a summary of the project settings before finishing the setup: Now I had a new FPGA design project. So far so good: I created a new Verilog file and pasted the code from the tutorial. Then I clicked Synthesize: That seemed to work OK. 
Then I went into Tools -> Floor Planner to see how the circuit looked inside the FPGA. I got an error. Then I looked a little further in the tutorial and found that I needed to create a constraints file, where I specify which pins I'm using and what they are. I went into File -> New, selected Physical Constraints File and gave it the name constr.cst: I just copied the code from the tutorial and pasted it into the constraints file. This is how it looks in the editor: The constraints file defines eight output pins that are supposed to turn eight LEDs on, one at a time. Now I could go into Tools -> Floor Planner. I got a graphical representation of the chip. I would expect eight pins to be highlighted in blue, but only six pins are highlighted. But I wouldn't know how to fix the code, and the messages in the terminal seemed positive: So I soldiered on. I selected the I/O Constraints at the bottom of the window and got a list of the constraints that were defined in the .cst file: The third and final file I needed to create before programming the FPGA was a timing constraints file. In FPGAs, everything is counted in clock cycles, and you need to define the clock frequency. I opened the Timing Constraints Editor: There I created a Timing Constraints file with a period of 83.333 nanoseconds and a frequency of 12 MHz: The timing constraints file only contains a single line of code specifying the clock speed: Then I selected Run Place and Route. That's the button with four colored squares that looks a bit like the Windows icon. I got a bunch of Generate file completed messages in the terminal, which seemed promising: If you look again at the image above, you can see the mouse hovering over the Programmer icon, which is a green downward arrow. I now pressed it and hoped for the best. I have no idea what I am doing. The programmer found a USB device, so I clicked Program/Configure. Here the tutorial ends, but I still needed to make some selections. 
I found the manual for the Programmer in the Gowin software directory and followed instructions that told me to select Embedded flash mode and select the .fs programming file in the dialog: When I clicked Save I got an error: This seems like a simple error, all I have to do is select the right chip. I tried selecting all the chips that have similar names to mine (GWIN-4, GWIN4B, GWIN-4D and all their versions). I had to select the chip both in the Floor Planner and in the Programmer. But nothing worked. I don't know where to go from here, so I'll stop. .md-content__button { display: none; }","title":"How"},{"location":"assignments/week16.html","text":"Applications and Implications Here's the proposal for my final project: What will it do? It will be a small, light and nimble SCARA-type robot arm with a structure made of PCBs (with 3D printed additions to stiffen the structure) and powered by brushless gimbal motors with angle sensors. It will be a motion platform that I will add end effectors to later. Its purpose is to teach robotics and perhaps automate some small tasks. Who has done what beforehand? 1 2 3 4 5 6 7 8 9 10 11 12 This robot arm sketch by Masoud Akbarzadeh inspired the shape of spiral 1 of my robot arm joint. Avishek Das made a small RC servo robot arm with a really nice simulation interface. I just don't like the jerkiness and imprecision of RC servos. Dan Chen made a clean-looking modular robot that can grab sushi. It's also based on RC servos. A group at Fab Lab Oulu made a cardboard robot arm in machine week in 2020. It's also based on RC servos. Hiroaki Kimura made a simple RC-servo arm and controlled it with a smaller arm with potentiometers in the joints. Kenny Phay Ngiap Peng made a stepper and RC-servo robot arm. A group at Fab Lab IED Madrid made a stepper-based SCARA arm in Machine Week in 2019. A group at Super Fab Lab Kochi made a nice stepper based SCARA arm in Machine week 2023. Jules Topart made a promising robot joint actuator. 
Xiaomeng Liu made a very nice iPhone camera gimbal with brushless motors. These are the actuators I want to work with and this is the smoothness of motion that I want. Light and nimble, like a gimbal! Christian Schmidt made a very compact GoPro camera gimbal but it was a little jittery. Adam B\u00e4ckstr\u00f6m hacked hobby servos in an amazing way and created a super-precise robot arm. What will you design? A robot joint controller board that I can replicate to make all the joints on the robot. I'll also design a 3D printed part that hides all the wires. What materials and components will be used, where will they come from, how much will they cost? Part Part no. Amount Price (USD) Total (USD) Link GM2804 Gimbal Motor w/Encoder G006983_2 3 38.99 116.97 https://shop.iflight-rc.com/ipower-gm2804-gimbal-motor-with-as5048a-encoder-pro288 Gimbal Motor Slipring - OD 6.5mm G006983_2 2 15.99 31.98 https://shop.iflight-rc.com/6-5mm-slipring-for-ipower-motor-gm2804-gimbal-motor-pro302?search=Gimbal%20Motor%20Slipring%20-%20OD%206.5mm Yoctopuce Micro-USB-Hub-V2 MHUB0002 1 32.4 32.4 https://www.yoctopuce.com/EN/products/extensions-and-networking/micro-usb-hub-v2 Pololu Adjustable 4-12V Step-Up/Step-Down Voltage Regulator S18V20ALV 1 37.95 37.95 https://www.pololu.com/product/2572 ATSAMD21E18A-AUT ATSAMD21E18A-AUTTR-ND 3 4.23 12.69 https://www.digikey.com/en/products/detail/microchip-technology/ATSAMD21E18A-AUT/4878871 DRV8313PWP 296-35540-5-ND 3 4.81 14.43 https://www.digikey.com/en/products/detail/texas-instruments/DRV8313PWP/3790947 PLA filament 10 Total 256.42 What parts and systems will be made? I will make the control boards and 3D printed structure. What processes will be used? PCB milling, 3D printing. What questions need to be answered? Can I find an Arduino core for the SAMD21 that both the OSAP and SimpleFOC libraries can compile to? Can these two libraries coexist on the same microcontroller? Can I get the closed loop control working? Can I tune the PID? 
Can I mill the fine traces for the SAMD21 microcontroller and the DRV8313 brushless motor driver? Can I design such a complicated board? How will it be evaluated? If I manage to get one robot joint working, then I consider the final project a success. I will keep working on the robot after the final presentation. .md-content__button { display: none; }","title":"16. Applications and Implications"},{"location":"assignments/week16.html#applications-and-implications","text":"Here's the proposal for my final project:","title":"Applications and Implications   "},{"location":"assignments/week16.html#what-will-it-do","text":"It will be a small, light and nimble SCARA-type robot arm with a structure made of PCBs (with 3D printed additions to stiffen the structure) and powered by brushless gimbal motors with angle sensors. It will be a motion platform that I will add end effectors to later. Its purpose is to teach robotics and perhaps automate some small tasks.","title":"What will it do?"},{"location":"assignments/week16.html#who-has-done-what-beforehand","text":"1 2 3 4 5 6 7 8 9 10 11 12 This robot arm sketch by Masoud Akbarzadeh inspired the shape of spiral 1 of my robot arm joint. Avishek Das made a small RC servo robot arm with a really nice simulation interface. I just don't like the jerkiness and imprecision of RC servos. Dan Chen made a clean-looking modular robot that can grab sushi. It's also based on RC servos. A group at Fab Lab Oulu made a cardboard robot arm in machine week in 2020. It's also based on RC servos. Hiroaki Kimura made a simple RC-servo arm and controlled it with a smaller arm with potentiometers in the joints. Kenny Phay Ngiap Peng made a stepper and RC-servo robot arm. A group at Fab Lab IED Madrid made a stepper-based SCARA arm in Machine Week in 2019. A group at Super Fab Lab Kochi made a nice stepper based SCARA arm in Machine week 2023. Jules Topart made a promising robot joint actuator. 
Xiaomeng Liu made a very nice iPhone camera gimbal with brushless motors. These are the actuators I want to work with and this is the smoothness of motion that I want. Light and nimble, like a gimbal! Christian Schmidt made a very compact GoPro camera gimbal but it was a little jittery. Adam B\u00e4ckstr\u00f6m hacked hobby servos in an amazing way and created a super-precise robot arm.","title":"Who has done what beforehand?"},{"location":"assignments/week16.html#what-will-you-design","text":"A robot joint controller board that I can replicate to make all the joints on the robot. I'll also design a 3D printed part that hides all the wires.","title":"What will you design?"},{"location":"assignments/week16.html#what-materials-and-components-will-be-used-where-will-they-come-from-how-much-will-they-cost","text":"Part Part no. Amount Price (USD) Total (USD) Link GM2804 Gimbal Motor w/Encoder G006983_2 3 38.99 116.97 https://shop.iflight-rc.com/ipower-gm2804-gimbal-motor-with-as5048a-encoder-pro288 Gimbal Motor Slipring - OD 6.5mm G006983_2 2 15.99 31.98 https://shop.iflight-rc.com/6-5mm-slipring-for-ipower-motor-gm2804-gimbal-motor-pro302?search=Gimbal%20Motor%20Slipring%20-%20OD%206.5mm Yoctopuce Micro-USB-Hub-V2 MHUB0002 1 32.4 32.4 https://www.yoctopuce.com/EN/products/extensions-and-networking/micro-usb-hub-v2 Pololu Adjustable 4-12V Step-Up/Step-Down Voltage Regulator S18V20ALV 1 37.95 37.95 https://www.pololu.com/product/2572 ATSAMD21E18A-AUT ATSAMD21E18A-AUTTR-ND 3 4.23 12.69 https://www.digikey.com/en/products/detail/microchip-technology/ATSAMD21E18A-AUT/4878871 DRV8313PWP 296-35540-5-ND 3 4.81 14.43 https://www.digikey.com/en/products/detail/texas-instruments/DRV8313PWP/3790947 PLA filament 10 Total 256.42","title":"What materials and components will be used, where will they come from, how much will they cost?"},{"location":"assignments/week16.html#what-parts-and-systems-will-be-made","text":"I will make the control boards and 3D printed 
structure.","title":"What parts and systems will be made?"},{"location":"assignments/week16.html#what-processes-will-be-used","text":"PCB milling, 3D printing.","title":"What processes will be used?"},{"location":"assignments/week16.html#what-questions-need-to-be-answered","text":"Can I find an Arduino core for the SAMD21 that both the OSAP and SimpleFOC libraries can compile to? Can these two libraries coexist on the same microcontroller? Can I get the closed loop control working? Can I tune the PID? Can I mill the fine traces for the SAMD21 microcontroller and the DRV8313 brushless motor driver? Can I design such a complicated board?","title":"What questions need to be answered?"},{"location":"assignments/week16.html#how-will-it-be-evaluated","text":"If I manage to get one robot joint working, then I consider the final project a success. I will keep working on the robot after the final presentation. .md-content__button { display: none; }","title":"How will it be evaluated?"},{"location":"assignments/week17.html","text":"Invention, Intellectual Property and Business Models License At the bottom of every page on this website, I claim my copyright over the work that I\u00b4ve done in the Fab Academy. I want to choose a permissive license for the work, all I want is to be mentioned if you use part of it in your own project. I thought about one of the Creative Commons licenses, but then I found that Creative Commons don't recommend their licenses for software or hardware. Others that may apply are the MIT license for software and the CERN open hardware license. This is a jungle, and I'm a bit confused. And if I choose a licence, I'll need to include it with all my design files. I don't have time for that right now. Maybe the \" Fab Lab license \" that Neil Gershenfeld puts in all his software files on the Fab Academy website would be a good license for my work. 
But again, I am already stretched to my limits trying to finish all the assignments and the documentation; I can't also go back and modify all the design files to include a license. I can only work from nine in the morning to midnight for so long. I need to see my family at some point. So for now, the work is copyright, and all rights are reserved. I will revisit this in the near future (I'll review choosealicense.com and Open Hardware Licenses ) and see how I can best open the work up for others to use. Feel free to contact me at \"svavar at fabisa dot is\" to get permission to use the stuff I've made. Funding plan I'm not going to start a company around this little robot. But I have applied for and received two grants to develop it. Grant #1 I applied for the first grant three months before the Fab Academy started. I had been thinking about final projects for a full year, because I was so excited about entering the Fab Academy. I really wanted to make something cool. Then when the deadline for the Icelandic Technology Development Fund rolled around, I had done quite a bit of thinking and I used that thinking to send in an application for the smallest grant of one million ISK (about $7000). And I got it! I used that grant to buy all the parts that I thought I would need for the arm, including molding and casting supplies to make precise gearboxes. The parts that I bought came in very handy, not all of them, but the rest will be useful to future Fab Academy students. Grant #2 The second grant was from the Icelandic Student Innovation Fund. I got funds to employ a university student for the summer to develop an interface for the robot in the Robot Operating System (ROS). That student is Gu\u00f0j\u00f3n Bergmann, a friend from engineering school. Many of his buddies at TU Delft are looking into ROS, so this project will be good for him. Fortunately I've managed to build the arm just in time for his summer project to start. 
Dissemination plan I've thought a lot about how to make my robot arm. But I haven't thought much about how to make sure that it reaches its intended user group. Who is its intended user? Someone like me who is interested in robotics, I guess? High school and university students, engineers, technology enthusiasts, computer science majors who want to program physical things and make them move? Who are they and where do they hang out online? How do I reach these people? I don't know. That's the part I'm not very good at. Do I set up a website? I guess I could let people in the Fab Lab community know about the project and see if anyone wants to buy an arm to use in their classes. But this is quite a tricky board to mill. And I could show the robot in the SimpleFOC community, except there everybody wants to build their own robot in their own style from scratch. I have a vague dream of asking Seeed Studio if they would be interested in selling populated boards in their online store as an easy way to get into robotics. I don't know. Maybe if the project gets featured in an article on Hackaday . Yes, that might be the right audience. Presentation files I made a final project presentation slide and video under Presentation . .md-content__button { display: none; }","title":"17. Invention, Intellectual Property and Business Models"},{"location":"assignments/week17.html#invention-intellectual-property-and-business-models","text":"","title":"Invention, Intellectual Property and Business Models   "},{"location":"assignments/week17.html#license","text":"At the bottom of every page on this website, I claim my copyright over the work that I\u00b4ve done in the Fab Academy. I want to choose a permissive license for the work, all I want is to be mentioned if you use part of it in your own project. I thought about one of the Creative Commons licenses, but then I found that Creative Commons don't recommend their licenses for software or hardware. 
Others that may apply are the MIT license for software and the CERN open hardware license. This is a jungle, and I'm a bit confused. And if I choose a licence, I'll need to include it with all my design files. I don't have time for that right now. Maybe the \" Fab Lab license \" that Neil Gershenfeld puts in all his software files on the Fab Academy website would be a good license for my work. But again, I am already stretched to my limits trying to finish all the assignments and the documentation; I can't also go back and modify all the design files to include a license. I can only work from nine in the morning to midnight for so long. I need to see my family at some point. So for now, the work is copyright, and all rights are reserved. I will revisit this in the near future (I'll review choosealicense.com and Open Hardware Licenses ) and see how I can best open the work up for others to use. Feel free to contact me at \"svavar at fabisa dot is\" to get permission to use the stuff I've made.","title":"License"},{"location":"assignments/week17.html#funding-plan","text":"I'm not going to start a company around this little robot. But I have applied for and received two grants to develop it.","title":"Funding plan"},{"location":"assignments/week17.html#grant-1","text":"I applied for the first grant three months before the Fab Academy started. I had been thinking about final projects for a full year, because I was so excited about entering the Fab Academy. I really wanted to make something cool. Then when the deadline for the Icelandic Technology Development Fund rolled around, I had done quite a bit of thinking and I used that thinking to send in an application for the smallest grant of one million ISK (about $7000). And I got it! I used that grant to buy all the parts that I thought I would need for the arm, including molding and casting supplies to make precise gearboxes. 
The parts that I bought came in very handy, not all of them, but the rest will be useful to future Fab Academy students.","title":"Grant #1"},{"location":"assignments/week17.html#grant-2","text":"The second grant was from the Icelandic Student Innovation Fund. I got funds to employ a university student for the summer to develop an interface for the robot in the Robot Operating System (ROS). That student is Gu\u00f0j\u00f3n Bergmann, a friend from engineering school. Many of his buddies at TU Delft are looking into ROS, so this project will be good for him. Fortunately I've managed to build the arm just in time for his summer project to start.","title":"Grant #2"},{"location":"assignments/week17.html#dissemination-plan","text":"I've thought a lot about how to make my robot arm. But I haven't thought much about how to make sure that it reaches its intended user group. Who is its intended user? Someone like me who is interested in robotics, I guess? High school and university students, engineers, technology enthusiasts, computer science majors who want to program physical things and make them move? Who are they and where do they hang out online? How do I reach these people? I don't know. That's the part I'm not very good at. Do I set up a website? I guess I could let people in the Fab Lab community know about the project and see if anyone wants to buy an arm to use in their classes. But this is quite a tricky board to mill. And I could show the robot in the SimpleFOC community, except there everybody wants to build their own robot in their own style from scratch. I have a vague dream of asking Seeed Studio if they would be interested in selling populated boards in their online store as an easy way to get into robotics. I don't know. Maybe if the project gets featured in an article on Hackaday . 
Yes, that might be the right audience.","title":"Dissemination plan"},{"location":"assignments/week17.html#presentation-files","text":"I made a final project presentation slide and video under Presentation . .md-content__button { display: none; }","title":"Presentation files"},{"location":"assignments/week18.html","text":"Project Development The final project plan. What tasks have been completed? I managed to put together one robot joint and test it successfully. I'm really glad that I got this far. Because my teaching duties were over for the semester, I was finally able to document as I went along. I found that enjoyable, and the final project documentation is much better for it. I will definitely aim for real-time documentation in my projects from now on. I approached the project in spirals, just trying to make one robot joint work. In the first spiral I took the Stepper RP2040 Modular Thing, changed its shape and added a few things. I did a lot of testing and debugging and found that the Toshiba TB67H451FNG motor drivers are not a good fit for brushless motors. That was quite a big blow, because I had spent a lot of time designing that board. But Rico Kanthatham urged us to make the major mistakes in our final project early, and I had done that. I did spiral 1 of my final project in Output Devices week (and two more weeks after that). So I still had time to find another motor driver and start the electronics design again from scratch. The evolution of my PCB design skills can be seen in these four boards: My PCB design journey. Right-click the image and select \"Open image in new tab\" to see the boards better. My first board was very simple; it had just a Xiao module, an LED with a current-limiting resistor and a button. It was a good first design project and I was so happy when I got the LED blinking! 
My first major design was the machine controller for the Teamanator , the Terminator-inspired tea machine that Andri , Hafey and myself finished successfully during an intense weekend in Fab Lab Reykjav\u00edk. My second major design was spiral 1 of my robot joint, which embeds a Xiao module onto the board. In spiral 2 I wanted to go further, so I embedded the SAMD21 chip directly onto the board, which I think is much cooler. I had the confidence to do that because I had played around with the RGBB Modular Thing and managed to program it with the Atmel-ICE. I think I've come a long way, because I had not designed an electronic circuit before starting the Fab Academy. What tasks remain? I need to connect the second robot joint and try operating them together. The robot is also missing a Z-axis (up and down). While I've been finishing up the documentation of the weekly assignments, I've been thinking about ways to make a clean-looking Z-axis. I think I have a good solution, but it requires a hack (see \"What questions need to be resolved?\"). I also need to make some kind of an end-effector. My instructor \u00de\u00f3rarinn helped me make an electromagnet , but in the final push, I had to do triage and decide which parts of the project needed to be abandoned and which ones I might finish. I put all my energy into getting the motor control board made and assembled with all the wires hidden. What has worked? What hasn't? Everything I've tested in the final project works. I made sure of that before I assembled it. It was a bit of a risk to wait so long before actually fabricating anything. In the last Global Open Time meeting on the Saturday before I presented, I was milling the first board for my final project, and my final project still looked like this: Meme from \u00de\u00f3rarinn. The actual image is the first one in \u00de\u00f3rarinn's meme at the bottom of this page. This was my second major attempt at milling this board. 
The first time I used a 0.01\" milling bit, which broke after an hour of milling, in the middle of the night. You can see my reaction to that in my presentation video . Then I tried a V-bit, which is much stronger and more durable, but if you don't get the Z-height exactly right, the track that it mills will either be too thin or too thick. This may result in traces disappearing. While Global Open Time was still in session, the Modela MDX-20 finished milling with the V-bit: The moment I realized that I could finish my final project. I went to the video conferencing screen in the lab with my hands up in victory, and the guys celebrated with me. Look at those beautiful traces. As for the outcome of the final project: I love how you can see all the electronic components and yet the robot has a clean look and all the wires are hidden. All of them! That took a LOT of system integration work. The robot actually looks pretty much exactly like I envisioned it. However, assembling the robot was hell. Wires kept breaking and it was difficult to get them into the 3D printed part. I wouldn't want to do it like this again. This was spiral 2. In spiral 3, I want to design a multilayer PCB that contains all the wires that are currently hidden inside the 3D printed part. Then I can realize my original vision of a robot that is made of motors and PCB and nothing else! And it will still have a clean look! The prospect is exciting, but my goodness, this project is so much work. I really need a good summer holiday. Also, pretty much all the software interface work remains. I did receive a grant for my engineering buddy Gu\u00f0j\u00f3n Bergmann to do some work on that. What questions need to be resolved? Magnetic angle sensor placed off-axis. The PID loop still worked. If I move it a tiny bit further away from the middle, the motor goes haywire. 
Can I place the magnetic angle sensor far enough from the axis so that I can put a Z-axis leadscrew through the hole in the middle of the brushless motor? I've done a preliminary test with me holding the sensor off-axis, and it may work. What will happen when? Note about the final push from \u00de\u00f3rarinn. We signed it and everything. It's sitting on the espresso corner table that I made in Machining week . The espresso was quite well extracted (here's another reference image ). Gu\u00f0j\u00f3n Bergmann , who is studying Aerospace Engineering at TU Delft, will make a mockup of the software interface, and possibly do some real-world testing this summer. His work will finish in the beginning of August. I don't know when I will have time to do more work on this project myself, but I really want to get my \"only motors and PCBs\" robot made at a PCB house. That would be so cool. But right now I'm on my last chance to write a Master's thesis in Mechanical Engineering this winter. So I'll probably focus on that in the fall of 2023. Gu\u00f0j\u00f3n Bergmann. What have you learned? Demand-side time management is a really important part of the Fab Academy, because if you just think about what you would like to accomplish and line those tasks up linearly, you will run out of time. The project must be designed with the time and energy you have in mind. Spiral development is also very useful to me. My instinct is to try to get all the way to a finished product in one giant leap, but spiral development, where you complete a rough and simple version first and then see if you have time for another complete spiral with more features, has much better results. As \u00de\u00f3rarinn said after I presented my final project, Fab Academy is really one big lesson in project management. 
However, I've never used Gantt charts or other detailed project management methods, because I like working in research and development, and there you never know where the road leads you or how long it's going to take. It's often possible to mock up a technical idea in a day or a weekend that you've been thinking about for years, and finally see if it's good or not. I've tried many things during the Fab Academy that I've been thinking about for a long time. And they're not as intimidating as they seemed. I've discovered the joy of distributed work on an open source community project (the Frankenstein MCU ). Look! I figured out a way to add Zoom video conferencing to the FMCU and made nice instructions on how to run that version locally, for the other developers. This is my first time doing this sort of thing and I really like it. I've discovered a world of collaborators in the Fab Lab network. My instructor \u00de\u00f3rarinn has been very supportive throughout this journey and taught me many things, especially relating to electronics. He is the resident meme master in the Fab Academy chat on Mattermost. Seriously, I think half of the memes on the meme channel came from him. He even made a meme just for me by rearranging the image series on my ideas page: .md-content__button { display: none; }","title":"18-20. Project Development"},{"location":"assignments/week18.html#project-development","text":"The final project plan.","title":"Project Development   "},{"location":"assignments/week18.html#what-tasks-have-been-completed","text":"I managed to put together one robot joint and test it successfully. I'm really glad that I got this far. Because my teaching duties were over for the semester, I was finally able to document as I went along. I found that enjoyable, and the final project documentation is much better for it. I will definitely aim for real-time documentation in my projects from now on. 
I approached the project in spirals, just trying to make one robot joint work. In the first spiral I took the Stepper RP2040 Modular Thing, changed its shape and added a few things. I did a lot of testing and debugging and found that the Toshiba TB67H451FNG motor drivers are not a good fit for brushless motors. That was quite a big blow, because I had spent a lot of time designing that board. But Rico Kanthatham urged us to make the major mistakes in our final project early, and I had done that. I did spiral 1 of my final project in Output Devices week (and two more weeks after that). So I still had time to find another motor driver and start the electronics design again from scratch. The evolution of my PCB design skills can be seen in these four boards: My PCB design journey. Right-click the image and select \"Open image in new tab\" to see the boards better. My first board was very simple; it had just a Xiao module, an LED with a current-limiting resistor and a button. It was a good first design project and I was so happy when I got the LED blinking! My first major design was the machine controller for the Teamanator , the Terminator-inspired tea machine that Andri , Hafey and myself finished successfully during an intense weekend in Fab Lab Reykjav\u00edk. My second major design was spiral 1 of my robot joint, which embeds a Xiao module onto the board. In spiral 2 I wanted to go further, so I embedded the SAMD21 chip directly onto the board, which I think is much cooler. I had the confidence to do that because I had played around with the RGBB Modular Thing and managed to program it with the Atmel-ICE. I think I've come a long way, because I had not designed an electronic circuit before starting the Fab Academy.","title":"What tasks have been completed?"},{"location":"assignments/week18.html#what-tasks-remain","text":"I need to connect the second robot joint and try operating them together. The robot is also missing a Z-axis (up and down). 
While I've been finishing up the documentation of the weekly assignments, I've been thinking about ways to make a clean-looking Z-axis. I think I have a good solution, but it requires a hack (see \"What questions need to be resolved?\"). I also need to make some kind of an end-effector. My instructor \u00de\u00f3rarinn helped me make an electromagnet , but in the final push, I had to do triage and decide which parts of the project needed to be abandoned and which ones I might finish. I put all my energy into getting the motor control board made and assembled with all the wires hidden.","title":"What tasks remain?"},{"location":"assignments/week18.html#what-has-worked-what-hasnt","text":"Everything I've tested in the final project works. I made sure of that before I assembled it. It was a bit of a risk to wait so long before actually fabricating anything. In the last Global Open Time meeting on the Saturday before I presented, I was milling the first board for my final project, and my final project still looked like this: Meme from \u00de\u00f3rarinn. The actual image is the first one in \u00de\u00f3rarinn's meme at the bottom of this page. This was my second major attempt at milling this board. The first time I used a 0.01\" milling bit, which broke after an hour of milling, in the middle of the night. You can see my reaction to that in my presentation video . Then I tried a V-bit, which is much stronger and more durable, but if you don't get the Z-height exactly right, the track that it mills will either be too thin or to thick. This may result in traces disappearing. While Global Open Time was still in session, the Modela MDX-20 finished milling with the V-bit: The moment I realized that I could finish my final project. I went to the video conferencing screen in the lab with my hands up in victory, and they guys celebrated with me. Look at those beautiful traces. 
As for the outcome of the final project: I love how you can see all the electronic components and yet the robot has a clean look and all the wires are hidden. All of them! That took a LOT of system integration work. The robot actually looks pretty much exactly like I envisioned it. However, assembling the robot was hell. Wires kept breaking and it was difficult to get them into the 3D printed part. I wouldn't want to do it like this again. This was spiral 2. In spiral 3, I want to design a multilayer PCB that contains all the wires that are currently hidden inside the 3D printed part. Then I can realize my original vision of a robot that is made of motors and PCB and nothing else! And it will still have a clean look! The prospect is exciting, but my goodness, this project is so much work. I really need a good summer holiday. Also, pretty much all the software interface work remains. I did receive a grant for my engineering buddy Gu\u00f0j\u00f3n Bergmann to do some work on that.","title":"What has worked? What hasn't?"},{"location":"assignments/week18.html#what-questions-need-to-be-resolved","text":"Magnetic angle sensor placed off-axis. The PID loop still worked. If I move it a tiny bit further away from the middle, the motor goes haywire. Can I place the magnetic angle sensor far enough from the axis so that I can put a Z-axis leadscrew through the hole in the middle of the brushless motor? I've done a preliminary test with me holding the sensor off-axis, and it may work.","title":"What questions need to be resolved?"},{"location":"assignments/week18.html#what-will-happen-when","text":"Note about the final push from \u00de\u00f3rarinn. We signed it and everything. It's sitting on the espresso corner table that I made in Machining week . The espresso was quite well extracted (here's another reference image ). 
Gu\u00f0j\u00f3n Bergmann , who is studying Aerospace Engineering at TU Delft, will make a mockup of the software interface, and possibly do some real-world testing this summer. His work will finish in the beginning of August. I don't know when I will have time to do more work on this project myself, but I really want to get my \"only motors and PCBs\" robot made at a PCB house. That would be so cool. But right now I'm on my last chance to write a Master's thesis in Mechanical Engineering this winter. So I'll probably focus on that in the fall of 2023. Gu\u00f0j\u00f3n Bergmann.","title":"What will happen when?"},{"location":"assignments/week18.html#what-have-you-learned","text":"Demand-side time management is a really important part of the Fab Academy, because if you just think about what you would like to accomplish and line those tasks up linearly, you will run out of time. The project must be designed with the time and energy you have in mind. Spiral development is also very useful to me. My instinct is to try to get all the way to a finished product in one giant leap, but spiral development, where you complete a rough and simple version first and then see if you have time for another complete spiral with more features, has much better results. As \u00de\u00f3rarinn said after I presented my final project, Fab Academy is really one big lesson in project management. However, I've never used Gantt charts or other detailed project management methods, because I like working in research and development, and there you never know where the road leads you or how long it's going to take. It's often possible to mock up a technical idea in a day or a weekend that you've been thinking about for years, and finally see if it's good or not. I've tried many things during the Fab Academy that I've been thinking about for a long time. And they're not as intimidating as they seemed. 
I've discovered the joy of distributed work on an open source community project (the Frankenstein MCU ). Look! I figured out a way to add Zoom video conferencing to the FMCU and made nice instructions on how to run that version locally, for the other developers. This is my first time doing this sort of thing and I really like it. I've discovered a world of collaborators in the Fab Lab network. My instructor \u00de\u00f3rarinn has been very supportive throughout this journey and taught me many things, especially relating to electronincs. He is the resident meme master in the Fab Academy chat on Mattermost. Seriously, I think half of the memes on the meme channel came from him. He even made a meme just for me by rearranging the image series on my ideas page: .md-content__button { display: none; }","title":"What have you learned?"},{"location":"final-project/3d_design.html","text":"3D Design Basic shape study. Since the arm is made of flat PCBs, I don't have many parameters to play with. Dimensions of the basic shape. I set the diameter of all the arcs to d3 , which is the diameter of the first arc. I change that and the whole model updates. I eventually decided to make all the joints the same shape, because it's simpler and because when the big arcs go inward, there isn't enough room on the board for all the components. So here's the final 3D design: Here's baksi, the spiral 2 version. Download baksi spiral 2 concept Fusion 360 model Download 3D print with internal channels Download base .md-content__button { display: none; }","title":"3D Design"},{"location":"final-project/3d_design.html#3d-design","text":"Basic shape study. Since the arm is made of flat PCBs, I don't have many parameters to play with. Dimensions of the basic shape. I set the diameter of all the arcs to d3 , which is the diameter of the first arc. I change that and the whole model updates. 
I eventually decided to make all the joints the same shape, because it's simpler and because when the big arcs go inward, there isn't enough room on the board for all the components. So here's the final 3D design: Here's baksi, the spiral 2 version. Download baksi spiral 2 concept Fusion 360 model Download 3D print with internal channels Download base .md-content__button { display: none; }","title":"3D Design   "},{"location":"final-project/effector.html","text":"End effector I asked my instructor \u00de\u00f3rarinn whether he could show me how to make an electromagnet, so that baks the robot arm could pick up small metal things. I expected to need to find a suitable iron core and wrap copper wire around it, and that it would be a half-day thing. I had no idea it could be a 5 minute thing! \u00de\u00f3rarinn got a choke coil from the electronics inventory, tried a few different voltage and current combinations, and voil\u00e1! We can lift a small bearing ball! Then we tried soldering three choke coils together to see if the holding strength would increase: It did, you can't shake the ball off! But the current requirements might be too high for a USB port-powered robot. .md-content__button { display: none; }","title":"End Effector"},{"location":"final-project/effector.html#end-effector","text":"I asked my instructor \u00de\u00f3rarinn whether he could show me how to make an electromagnet, so that baks the robot arm could pick up small metal things. I expected to need to find a suitable iron core and wrap copper wire around it, and that it would be a half-day thing. I had no idea it could be a 5 minute thing! \u00de\u00f3rarinn got a choke coil from the electronics inventory, tried a few different voltage and current combinations, and voil\u00e1! We can lift a small bearing ball! Then we tried soldering three choke coils together to see if the holding strength would increase: It did, you can't shake the ball off! 
But the current requirements might be too high for a USB port-powered robot. .md-content__button { display: none; }","title":"End effector   "},{"location":"final-project/electronics.html","text":"Electronics Design Since both the SimpleFOC motor control library and the OSAP network layer library were able to compile onto the SAMD21 chip by pretending that it's an Arduino Nano 33 IoT, that's what I'm going with. Now I need to go into the schematic of this Arduino to see its pin definitions: The Arduino 33 IoT schematic. Let's see what we have here. The SPI communication wires from the AS5048 magnetic angle sensor are as follows: black, pin 1, CSn blue, pin 2, CLK yellow, pin 4, MOSI green, pin 3, MISO red, pin 11, VDD5V white, pin 13, GND SPI connections to the breadboard Modular Thing. Let's start with MOSI. That's pin PA16 on the Arduino Nano 33 IoT. I'll put the yellow wire there on the breadboard Modular Thing. Then MISO is pin PA19. I'll put the green wire there. Through all this tinkering I've learned that CLK is the same as SCK. That's pin PA17 and the blue wire goes there. Csn, or the chip select pin, can be assigned to any digital pin. I'll put on PA18 (the black wire). Then all that's left is +3.3 volts and ground. And here's the test: I'm getting an angle reading in the serial monitor! How cool is that? OK, let's connect the motor driver. For that we need PWM. According to the schematic above, the PWM pins on the Arduino Nano 33 IoT are as follows: Arduino 2 3 5 6 9 10 11 12 16/A2 17/A3 19/A5 SAMD21 PB10 PB11 PA05 PA04 PA20 PA21 Not on schematic Not on schematic PA11 PA10 PB09 On the the breadboard Modular Thing I only have access to PA pins. Let's try PA4, PA5 and PA10 for PWM and PA6 as the enable pin. In the Arduino code I'll set pins 6, 5 and 17 to output PWM and let pin 7 be the enable pin. After uploading the angle_control.ino sketch, the motor twitched a little bit and I got the following messages in the serial monitor: MOT: Enable driver. 
MOT: Align sensor. MOT: Failed to notice movement MOT: Init FOC failed. Motor ready. I must have mixed up some of the motor driver pins. Let's add my LED testing board to the circuit: No PWM on the brown wire. The enable pin turns on first (white wire) and then PWM starts on the yellow and orange wires. The brown wire (the third BLDC phase) gets no PWM signal. Aha! I still have digital pin 9 in the code, which applies to PA20! That's how I first wrote the code, but I then discovered that PA20 isn't broken out on the breadboard Thing. Alright, I'll change the third PWM pin from 9 to 17, which matches PA10, and see what happens. Now I'm getting PWM signals on all three phases, but the motor shakes like crazy. I wonder if I accidentally wired the phases in the wrong order. Nope, that isn't helping. I'll try removing the LED test board, since I'm done with that for now. And that was all it took! I have a smooth, responsive servomotor controlled by a bare SAMD21 chip! I won't upload a video of that, because my repository is getting quite big. This is it. Now I can design my board. PCB schematic design I added some electronic parts to a blank schematic. I used the RGBB Modular Thing as a reference design, to see which passive parts the microcontroller needs. I want to have an RGB status LED on the robot's 'forearm' and also a button to disengage the motors and record movements. I can see that the Modular Things follow the SAMD21 datasheet and put a 10uF decoupling capacitor on VDDIN (the +3.3V input voltage) and a 1uF decoupling capacitor on VDDCORE (the 1.23V core logic voltage of the chip, provided by an internal regulator): I don't know what decoupling capacitors do, but it's nice to verify the design using this table in the microcontroller datasheet. Then I started on the motor controller. I used the SimpleFOC Mini as a reference design. 
One of the goals of the SimpleFOC Mini is to: Make it in a way to be a minimal working example for users that are interested to build their own boards based on the DRV8313 chip. -Antun Skuric, creator of the SimpleFOC library The SimpleFOC Mini is a two-layer board, but I want to make a one-layer board. Fortunately I'm not using all the pins on the motor controller, so I can simplify the design: The DRV8313 motor driver has a 3.3 V regulator to run a microcontroller, but it only supplies 10 mA, which is not enough for me. So I'll skip that pin. On second thought, after reading about the pins that I haven't been using, I'm changing my opinion. I would like to be able to The DRV8313 motor driver wasn't in the KiCAD library or in the fab library, but I found it on SnapEDA. But I quickly ran into a problem with it: Pins 4 and 11 are superimposed. They are both VM, but on the SimpleFOC Mini schematic, they are connected differently. So I needed to right click the symbol and select Edit with Symbol Editor. Fortunately, all I had to do there was to move one of the pins: I also moved the three superimposed GND pins apart, so that I could read the pin numbers. I used this layout example heavily when reviewing my design. PCB routing I had to put eight jumper resistors in order to route this board on one layer. I also route the USB, power and SPI wires through the 3D printed layer that is sandwiched between the two PCBs in each joint. I look forward to redesigning the board for manufacturing at a board house, with 2-4 layers. That should make things considerably easier. But I am glad that I managed to design a version of the board that can be made in any Fab Lab on a single-layer PCB, because not everyone is skilled at making double-layer PCBs (myself included). Let's just put a wire that goes on the back side of the PCB. No one will know. 
PCB design review Now let me go over the schematic and PCB layout and see if everything checks out: Microcontroller USB (I didn't check whether the data lines are equally long. Let's hope for the best!) 3.3V regulator (PCB layout didn't match schematic, fixed now) Button RGB LED JTAG programming connector and associated passive components (Oh no: SWDCLK trace is routed under JTAG connector! Looking at the JTAG connector, this seems impossible. Time for another jumper resistor.) SPI connections to the magnetic angle sensor Decoupling capacitors for the microcontroller Rest of the microcontroller connections Motor driver Motor power input Power indicator LED FAULT, RESET and SLEEP on the motor driver Bulk capacitor and bypass capacitors for motor (One bypass cap wasn't connected to ground! Fixed now.) PWM signal wires between microcontroller and motor driver ENABLE wires and resistor Rest of motor driver connections ( Two motor outputs routed under jumper resistor. Not good. The two VCC pins on the motor driver weren't connected together. They clearly need to be, according to the layout example. Fixed with a trace underneath the BLDC motor. Must remember to cut some vinyl to insulate the motor from this +10V power trace. The layout example has resistors going between ground pins. The SimpleFOC Mini schematic has no such resistors. It worked on the breadboard, so I'll skip the resistors. I've run out of room on the PCB! One more thing that I changed from the SimpleFOC schematic, is that I'm connecting COMPO to ground, as the motor driver datasheet shows. I also tried to connect NC to ground, but NC means Not Connected and it doesn't want to connect to anything. Both the datasheet and the SimpleFOC Mini schematic connect the +3.3V output from the motor driver to FAULT, RESET and SLEEP, to pull them up. I just don't have space for that. 
I'll just mill the PCB and if the 3.3V supply from the microcontroller doesn't do the trick then I'll just have to solder a wire to the board afterwards. Let's go!) I had to stop milling the board and start again, because I forgot one of the comments I made in the last item on the Motor driver checklist. A very important comment that I've now italicized. So I fixed the PCB layout as follows: I realized that after going through the checklist and making the necessary changes, I didn't need those two jumper resistors anymore. I was also able to move the VCC track out from underneath the motor. Final design The baksi robot joint schematic. The baksi robot joint PCB layout. PCB production My first attempt at milling the PCB failed, and I broke the smallest end mill (the 0.01\" one). I only have one left now. Looking back, I made the fundamental mistake of not making a small test of the most challenging aspect of the process. I'm going to try a V-bit now. When soldering the teeny tiny DRV8313 motor driver onto my beautiful board, I found that its legs are very thin and flexible, and so when you've fastened the driver to the big ground plane with a heat gun, you can bend the legs into place, as long as they're close to their intended copper pad. After soldering, I successfully put the bootloader on the SAMD21 chip and then programmed it with the SimpleFOC code. But I got no reading from the magnetic angle sensor and the serial monitor said that it detected no movement from the motor. The motor sounded weird. 
Download baks KiCAD project Download baks_joint2_traces.png Download baks_joint2_traces_exterior.png Download baks_joint2_holes_interior.png .md-content__button { display: none; }","title":"Electronics Design"},{"location":"final-project/electronics.html#electronics-design","text":"Since both the SimpleFOC motor control library and the OSAP network layer library were able to compile onto the SAMD21 chip by pretending that it's an Arduino Nano 33 IoT, that's what I'm going with. Now I need to go into the schematic of this Arduino to see its pin definitions: The Arduino 33 IoT schematic. Let's see what we have here. The SPI communication wires from the AS5048 magnetic angle sensor are as follows: black, pin 1, CSn blue, pin 2, CLK yellow, pin 4, MOSI green, pin 3, MISO red, pin 11, VDD5V white, pin 13, GND SPI connections to the breadboard Modular Thing. Let's start with MOSI. That's pin PA16 on the Arduino Nano 33 IoT. I'll put the yellow wire there on the breadboard Modular Thing. Then MISO is pin PA19. I'll put the green wire there. Through all this tinkering I've learned that CLK is the same as SCK. That's pin PA17 and the blue wire goes there. Csn, or the chip select pin, can be assigned to any digital pin. I'll put on PA18 (the black wire). Then all that's left is +3.3 volts and ground. And here's the test: I'm getting an angle reading in the serial monitor! How cool is that? OK, let's connect the motor driver. For that we need PWM. According to the schematic above, the PWM pins on the Arduino Nano 33 IoT are as follows: Arduino 2 3 5 6 9 10 11 12 16/A2 17/A3 19/A5 SAMD21 PB10 PB11 PA05 PA04 PA20 PA21 Not on schematic Not on schematic PA11 PA10 PB09 On the the breadboard Modular Thing I only have access to PA pins. Let's try PA4, PA5 and PA10 for PWM and PA6 as the enable pin. In the Arduino code I'll set pins 6, 5 and 17 to output PWM and let pin 7 be the enable pin. 
After uploading the angle_control.ino sketch, the motor twitched a little bit and I got the following messages in the serial monitor: MOT: Enable driver. MOT: Align sensor. MOT: Failed to notice movement MOT: Init FOC failed. Motor ready. I must have mixed up some of the motor driver pins. Let's add my LED testing board to the circuit: No PWM on the brown wire. The enable pin turns on first (white wire) and then PWM starts on the yellow and orange wires. The brown wire (the third BLDC phase) gets no PWM signal. Aha! I still have digital pin 9 in the code, which applies to PA20! That's how I first wrote the code, but I then discovered that PA20 isn't broken out on the breadboard Thing. Alright, I'll change the third PWM pin from 9 to 17, which matches PA10, and see what happens. Now I'm getting PWM signals on all three phases, but the motor shakes like crazy. I wonder if I accidentally wired the phases in the wrong order. Nope, that isn't helping. I'll try removing the LED test board, since I'm done with that for now. And that was all it took! I have a smooth, responsive servomotor controlled by a bare SAMD21 chip! I won't upload a video of that, because my repository is getting quite big. This is it. Now I can design my board.","title":"Electronics Design   "},{"location":"final-project/electronics.html#pcb-schematic-design","text":"I added some electronic parts to a blank schematic. I used the RGBB Modular Thing as a reference design, to see which passive parts the microcontroller needs. I want to have an RGB status LED on the robot's 'forearm' and also a button to disengage the motors and record movements. 
I can see that the Modular Things follow the SAMD21 datasheet and put a 10uF decoupling capacitor on VDDIN (the +3.3V input voltage) and a 1uF decoupling capacitor on VDDCORE (the 1.23V core logic voltage of the chip, provided by an internal regulator): I don't know what decoupling capacitors do, but it's nice to verify the design using this table in the microcontroller datasheet. Then I started on the motor controller. I used the SimpleFOC Mini as a reference design. One of the goals of the SimpleFOC Mini is to: Make it in a way to be a minimal working example for users that are interested to build their own boards based on the DRV8313 chip. -Antun Skuric, creator of the SimpleFOC library The SimpleFOC Mini is a two-layer board, but I want to make a one-layer board. Fortunately I'm not using all the pins on the motor controller, so I can simplify the design: The DRV8313 motor driver has a 3.3 V regulator to run a microcontroller, but it only supplies 10 mA, which is not enough for me. So I'll skip that pin. On second thought, after reading about the pins that I haven't been using, I'm changing my opinion. I would like to be able to The DRV8313 motor driver wasn't in the KiCAD library or in the fab library, but I found it on SnapEDA. But I quickly ran into a problem with it: Pins 4 and 11 are superimposed. They are both VM, but on the SimpleFOC Mini schematic, they are connected differently. So I needed to right click the symbol and select Edit with Symbol Editor. Fortunately, all I had to do there was to move one of the pins: I also moved the three superimposed GND pins apart, so that I could read the pin numbers. I used this layout example heavily when reviewing my design.","title":"PCB schematic design"},{"location":"final-project/electronics.html#pcb-routing","text":"I had to put eight jumper resistors in order to route this board on one layer. 
I also route the USB, power and SPI wires through the 3D printed layer that is sandwiched between the two PCBs in each joint. I look forward to redesigning the board for manufacturing at a board house, with 2-4 layers. That should make things considerably easier. But I am glad that I managed to design a version of the board that can be made in any Fab Lab on a single-layer PCB, because not everyone is skilled at making double-layer PCBs (myself included). Let's just put a wire that goes on the back side of the PCB. No one will know.","title":"PCB routing"},{"location":"final-project/electronics.html#pcb-design-review","text":"Now let me go over the schematic and PCB layout and see if everything checks out:","title":"PCB design review"},{"location":"final-project/electronics.html#microcontroller","text":"USB (I didn't check whether the data lines are equally long. Let's hope for the best!) 3.3V regulator (PCB layout didn't match schematic, fixed now) Button RGB LED JTAG programming connector and associated passive components (Oh no: SWDCLK trace is routed under JTAG connector! Looking at the JTAG connector, this seems impossible. Time for another jumper resistor.) SPI connections to the magnetic angle sensor Decoupling capacitors for the microcontroller Rest of the microcontroller connections","title":"Microcontroller"},{"location":"final-project/electronics.html#motor-driver","text":"Motor power input Power indicator LED FAULT, RESET and SLEEP on the motor driver Bulk capacitor and bypass capacitors for motor (One bypass cap wasn't connected to ground! Fixed now.) PWM signal wires between microcontroller and motor driver ENABLE wires and resistor Rest of motor driver connections ( Two motor outputs routed under jumper resistor. Not good. The two VCC pins on the motor driver weren't connected together. They clearly need to be, according to the layout example. Fixed with a trace underneath the BLDC motor. 
Must remember to cut some vinyl to insulate the motor from this +10V power trace. The layout example has resistors going between ground pins. The SimpleFOC Mini schematic has no such resistors. It worked on the breadboard, so I'll skip the resistors. I've run out of room on the PCB! One more thing that I changed from the SimpleFOC schematic, is that I'm connecting COMPO to ground, as the motor driver datasheet shows. I also tried to connect NC to ground, but NC means Not Connected and it doesn't want to connect to anything. Both the datasheet and the SimpleFOC Mini schematic connect the +3.3V output from the motor driver to FAULT, RESET and SLEEP, to pull them up. I just don't have space for that. I'll just mill the PCB and if the 3.3V supply from the microcontroller doesn't do the trick then I'll just have to solder a wire to the board afterwards. Let's go!) I had to stop milling the board and start again, because I forgot one of the comments I made in the last item on the Motor driver checklist. A very important comment that I've now italicized. So I fixed the PCB layout as follows: I realized that after going through the checklist and making the necessary changes, I didn't need those two jumper resistors anymore. I was also able to move the VCC track out from underneath the motor.","title":"Motor driver"},{"location":"final-project/electronics.html#final-design","text":"The baksi robot joint schematic. The baksi robot joint PCB layout.","title":"Final design"},{"location":"final-project/electronics.html#pcb-production","text":"My first attempt at milling the PCB failed, and I broke the smallest end mill (the 0.01\" one). I only have one left now. Looking back, I made the fundamental mistake of not making a small test of the most challenging aspect of the process. I'm going to try a V-bit now. 
When soldering the teeny tiny DRV8313 motor driver onto my beautiful board, I found that its legs are very thin and flexible, and so when you've fastened the driver to the big ground plane with a heat gun, you can bend the legs into place, as long as they're close to their intended copper pad. After soldering, I successfully put the bootloader on the SAMD21 chip and then programmed it with the SimpleFOC code. But I got no reading from the magnetic angle sensor and the serial monitor said that it detected no movement from the motor. The motor sounded weird. Download baks KiCAD project Download baks_joint2_traces.png Download baks_joint2_traces_exterior.png Download baks_joint2_holes_interior.png .md-content__button { display: none; }","title":"PCB production"},{"location":"final-project/embedded_programming.html","text":"Embedded programming Modular Things RGBB Thing To get into Modular Things, I first tried to get the rgbb board to work. I managed to put a bootloader onto the SAMD21E18A chip on the rgbb board and then I put the Fab SAM core on it (first I put a different core on it and the COM port disappeared). I connected it to the computer, opened up the web interface and after a bit of coding the red LED turns on when I press the button! The RGB diode was very dim, so my instructor \u00de\u00f3rarinn took to the electronics workbench, tested it, replaced it with a new LED and discovered that the current-limiting resistors were 10kOhm instead of the recommended 1kOhm. I got to try the soldering tweezers for the first time and boy, are they handy for removing components from a board! \u00de\u00f3rarinn lighting up the diodes with the bench power supply. You just grab the part with the hot tweezers and it comes off! \u00de\u00f3rarinn explained diodes on the whiteboard and I discovered that I've had anodes and diodes backwards all these years! No wonder I was never particularly good at calculating electrical circuits. 
Stepper Thing I was happy to get the rgbb board working, but then the stepper Modular Thing that I made suddenly stopped working. I sent Quentin Bols\u00e9e the image above and asked him if he knew what the errors meant. But before he could reply, I remembered something that \u00c1rni Bj\u00f6rnsson had showed me. Apparently, the pin assignments have changed in the latest Modular Things code for the RP2040 board. I reverted them back to the original ones, and the stepper worked! My BLDC Thing I first tried to make a BLDC Modular Thing using the instructions in the Modular Things repository. I got an error that I couldn't figure out. SimpleFOC The SimpleFOC motor control library works when I use Xiao SAMD21 modules, but when I design my boards, I'd like to use the bare SAMD21E18A chip. I ran into an issue compiling the motor control code to the bare chip and asked for help on the SimpleFOC community forum: Error compiling to SAMD21 (bare chip) - SimpleFOC Community I got a very helpful answer from @AdinAck. Adin made a brushless DC motor control board with a SAMD21 chip running the SimpleFOC library. When programming the chip, he told the Arduino IDE that he was programming an Adafruit Feather board. All he had to do was to design the board so that it conformed to the Feather, and everything worked! So I tried to upload the motor control code to the rgbb Modular Things board by telling the Arduino IDE that I was uploading to a MattAirTech Xeno Mini. No luck. Then an Arduino MKRZERO. That didn't work either. Then an Arduino MKR1000. Nope. I was just randomly trying different boards and finally tried the Arduino Nano 33 IoT. It worked! I even got serial output from it: All I get is errors and zeroes, because there is no motor and no angle sensor connected to the MCU. But I'm happy, because the code compiled! Then I checked if I could upload the Modular Thing code to the SAMD21 chip under the pretense that it was an Arduino Nano 33 IoT. That worked too! 
I even got a 'false' response from the button (in the bottom right corner). Aw yeah! Next, I soldered the necessary parts onto the breadboard Thing and connected it up to the LED test board that I made in Electronics Production week. I wrote a loop that blinks every pin on the IC and sends the corresponding pin number to the serial port. With this I was able to identify which pin in the Arduino Nano 33 IoT board definition applied to which pin on the IC itself. .md-content__button { display: none; }","title":"Embedded programming"},{"location":"final-project/embedded_programming.html#embedded-programming","text":"","title":"Embedded programming   "},{"location":"final-project/embedded_programming.html#modular-things","text":"","title":"Modular Things"},{"location":"final-project/embedded_programming.html#rgbb-thing","text":"To get into Modular Things, I first tried to get the rgbb board to work. I managed to put a bootloader onto the SAMD21E18A chip on the rgbb board and then I put the Fab SAM core on it (first I put a different core on it and the COM port disappeared). I connected it to the computer, opened up the web interface and after a bit of coding the red LED turns on when I press the button! The RGB diode was very dim, so my instructor \u00de\u00f3rarinn took to the electronics workbench, tested it, replaced it with a new LED and discovered that the current-limiting resistors were 10kOhm instead of the recommended 1kOhm. I got to try the soldering tweezers for the first time and boy, are they handy for removing components from a board! \u00de\u00f3rarinn lighting up the diodes with the bench power supply. You just grab the part with the hot tweezers and it comes off! \u00de\u00f3rarinn explained diodes on the whiteboard and I discovered that I've had anodes and diodes backwards all these years! 
No wonder I was never particularly good at calculating electrical circuits.","title":"RGBB Thing"},{"location":"final-project/embedded_programming.html#stepper-thing","text":"I was happy to get the rgbb board working, but then the stepper Modular Thing that I made suddenly stopped working. I sent Quentin Bols\u00e9e the image above and asked him if he knew what the errors meant. But before he could reply, I remembered something that \u00c1rni Bj\u00f6rnsson had showed me. Apparently, the pin assignments have changed in the latest Modular Things code for the RP2040 board. I reverted them back to the original ones, and the stepper worked!","title":"Stepper Thing"},{"location":"final-project/embedded_programming.html#my-bldc-thing","text":"I first tried to make a BLDC Modular Thing using the instructions in the Modular Things repository. I got an error that I couldn't figure out.","title":"My BLDC Thing"},{"location":"final-project/embedded_programming.html#simplefoc","text":"The SimpleFOC motor control library works when I use Xiao SAMD21 modules, but when I design my boards, I'd like to use the bare SAMD21E18A chip. I ran into an issue compiling the motor control code to the bare chip and asked for help on the SimpleFOC community forum: Error compiling to SAMD21 (bare chip) - SimpleFOC Community I got a very helpful answer from @AdinAck. Adin made a brushless DC motor control board with a SAMD21 chip running the SimpleFOC library. When programming the chip, he told the Arduino IDE that he was programming an Adafruit Feather board. All he had to do was to design the board so that it conformed to the Feather, and everything worked! So I tried to upload the motor control code to the rgbb Modular Things board by telling the Arduino IDE that I was uploading to a MattAirTech Xeno Mini. No luck. Then an Arduino MKRZERO. That didn't work either. Then an Arduino MKR1000. Nope. I was just randomly trying different boards and finally tried the Arduino Nano 33 IoT. It worked! 
I even got serial output from it: All I get is errors and zeroes, because there is no motor and no angle sensor connected to the MCU. But I'm happy, because the code compiled! Then I checked if I could upload the Modular Thing code to the SAMD21 chip under the pretense that it was an Arduino Nano 33 IoT. That worked too! I even got a 'false' response from the button (in the bottom right corner). Aw yeah! Next, I soldered the necessary parts onto the breadboard Thing and connected it up to the LED test board that I made in Electronics Production week. I wrote a loop that blinks every pin on the IC and sends the corresponding pin number to the serial port. With this I was able to identify which pin in the Arduino Nano 33 IoT board definition applied to which pin on the IC itself. .md-content__button { display: none; }","title":"SimpleFOC"},{"location":"final-project/ideas.html","text":"Final Project I have three ideas for a final project: A robot arm, a wake-up mask and a digital stirring stick that tells me when to press down with the French press to get a perfect brew. Let's go through them one by one, with illustrations made in MS Paint. Idea 1: baks the robot arm I want to make a robot arm. I realize that this is not a particularly original idea, but I just dig robot arms. I want to call it baks, which is the noun version of the Icelandic verb baksa, which means to 'busy oneself' or to 'be industrious', usually in the context of toddlers playing energetically outside, digging and dragging logs about and throwing rocks. Here's a video where I describe the final project to Neil Gershenfeld during random review. A bit of history I backed the original uArm on Kickstarter and enjoyed using it until I accidentally connected the power supply directly to the Arduino instead to the uArm Shield. The plan was to have it fry doughnuts and frost them but the arm ended up getting fried instead. The software part also intimidated me. 
In one of the courses in my Mechanical Engineering degree, the task was to pick up a wine glass and give the professor a sip. The rationale behind it was to think about solutions for people with Parkinson's disease. My group put a lot of work into making a robot arm with a soft 3D printed hand that I found on Thingiverse. We converted an old theatre light (if you find one, they're a treasure trove, this one had 13 stepper motors). I designed and 3D printed all the custom components and the arm worked . But then I wanted an arm that can do more than one thing. When Fab Lab \u00cdsafj\u00f6r\u00f0ur needed something to impress the President of Iceland and the First Lady, Kar\u00edtas at Fab Lab Sau\u00f0\u00e1rkr\u00f3kur was kind enough to loan us a Niryo One , a 3D printed educational robot arm based on steppers and Dynamixel servos. I programmed the robot to hand the First Lady a laser cut business card , and had great fun experimenting with the robot. Then I fried one of its servo motors when a cable connector got stuck and the robot kept trying to move to its intended position. I managed to swap out the motor, but I thought that surely there's an improvement to be made here. Since educational robot arms seem to be mostly used to move colored cubes around, I think it's OK to scale them down and simplify them to make them less expensive and more accessible. I'd like to base my arm on brushless motors without any gearing, and use springs to balance it, just like the Anglepoise lamp . Then it's nimble and can't damage anything. It won't be able to lift much, but that's OK! I only want it to lift a paint brush or a colored cube. Outline of the idea Since I have a history of frying robot arms, I have a few personal design goals for my arm. Cannot hurt itself or the user Easily programmed by moving the arm and saving keypoints and paths Small Cheap Precise enough to paint candles The last point warrants a little explanation. 
Kertah\u00fasi\u00f0 in \u00cdsafj\u00f6r\u00f0ur manufacture candles in the shape of known buildings in the town and now around the country. It was started by a couple who were on parental leave but the money wasn't enough to live on. So they came to \u00de\u00f3rarinn at Fab Lab \u00cdsafj\u00f6r\u00f0ur and he helped them mold and cast the first candle houses. He was able to do this because of his experience at the Fab Academy. This was only last year, but their product offering now counts dozens of different designs. Every time they make a new design, the model is 3D printed here at Fab Lab \u00cdsafj\u00f6r\u00f0ur and S\u00e6d\u00eds and Gunnar make a silicone mold in their workshop. It's been a great collaboration. But hand painting the candles is a lot of work, so I wonder if it's possible to make a small, inexpensive and easy to use robot arm to paint the candles for them. Idea 2: Wake-up mask I live in \u00cdsafj\u00f6r\u00f0ur, and that means that when writing this, I have not seen the sun for two months. Last fall I found that waking up with my younger son became significantly harder as the sun came out later in the day. Then the sun disappeared altogether. The Seasonal Affective Disorder acronym seems apt: the mood is SAD. I want to get more light in the dark winter months, especially to wake me up naturally in the morning. I absolutely love the artificial sun by DIY Perks and I want to make one, but I'm not sure where to put it. Maybe in a big floor lamp similar to the Arco , but then the liquid coolant would need to be piped all the way between the stand and the light. I'll keep that one in my little black book of ideas for now. Since my wife feeds the baby during the night, I take him upstairs when he wakes up early in the morning. That way we can share the sleeplessness. But this means that I can't use a sunrise alarm clock, because I don't want to wake up my wife and my older boy. I wonder if a wake-up mask could work. 
I could take a sleeping mask and add a few LEDs that shine into my eyes for about ten minutes before my alarm sounds. Use a WiFi enabled microcontroller to sync the sunrise to the alarm clock on my phone. That's probably the most difficult part, but it would make it easy to use. Idea 3: pH pal for the French Press On my parental leave last fall, I wondered if I could use a pH meter to tell me exactly when to press down to make perfect French Press coffee every time. From experience (and reading online) I knew that with too short a brewing time the coffee becomes acidic and when you brew for too long, the coffee becomes bitter. So to get a baseline reference, every morning with my baby boy, I logged the quality of my morning cup. French press log started July 30 2022 Acidic: Balanced: Bitter: (I made the tally marks in Inkscape, by the way.) I found that the coffee grounds get more acidic as they age. As I don't have a coffee grinder, I buy ground coffee in bags that last for three to four weeks. In this experiment I found that freshly ground coffee only seems to be delicious for as long as a loaf of bread is: one day. Two days max. Those days I got great cups of coffee. So the result was not the one I wanted: Freshly ground coffee is much more important than pressing down at the right time. I also found that pourover or Vietnamese drip is makes a much better brew out of stale coffee grounds. Quite nice, actually. So there goes that idea, but I did find a source of low cost, high quality pH sensors and amplifiers. Believe me, this is cheap for what you get. To measure pH with the precision that you need to tell bitter coffee (pH 5.10) from acidic coffee (pH 4.85), your sensor amplifier needs to be isolated and well designed. Continuing the train of thought about coffee pH I also had the brilliant idea of putting a little bit of baking soda (which is basic) into the cup to 'fix' the coffee when it turned out acidic. I made the worst cup of coffee in history. 
A few people have encouraged me to make a prototype of the pH pal. Maybe later! .md-content__button { display: none; }","title":"Ideas"},{"location":"final-project/ideas.html#final-project","text":"I have three ideas for a final project: A robot arm, a wake-up mask and a digital stirring stick that tells me when to press down with the French press to get a perfect brew. Let's go through them one by one, with illustrations made in MS Paint.","title":"Final Project   "},{"location":"final-project/ideas.html#idea-1-baks-the-robot-arm","text":"I want to make a robot arm. I realize that this is not a particularly original idea, but I just dig robot arms. I want to call it baks, which is the noun version of the Icelandic verb baksa, which means to 'busy oneself' or to 'be industrious', usually in the context of toddlers playing energetically outside, digging and dragging logs about and throwing rocks. Here's a video where I describe the final project to Neil Gershenfeld during random review.","title":"Idea 1: baks the robot arm"},{"location":"final-project/ideas.html#a-bit-of-history","text":"I backed the original uArm on Kickstarter and enjoyed using it until I accidentally connected the power supply directly to the Arduino instead to the uArm Shield. The plan was to have it fry doughnuts and frost them but the arm ended up getting fried instead. The software part also intimidated me. In one of the courses in my Mechanical Engineering degree, the task was to pick up a wine glass and give the professor a sip. The rationale behind it was to think about solutions for people with Parkinson's disease. My group put a lot of work into making a robot arm with a soft 3D printed hand that I found on Thingiverse. We converted an old theatre light (if you find one, they're a treasure trove, this one had 13 stepper motors). I designed and 3D printed all the custom components and the arm worked . But then I wanted an arm that can do more than one thing. 
When Fab Lab \u00cdsafj\u00f6r\u00f0ur needed something to impress the President of Iceland and the First Lady, Kar\u00edtas at Fab Lab Sau\u00f0\u00e1rkr\u00f3kur was kind enough to loan us a Niryo One , a 3D printed educational robot arm based on steppers and Dynamixel servos. I programmed the robot to hand the First Lady a laser cut business card , and had great fun experimenting with the robot. Then I fried one of its servo motors when a cable connector got stuck and the robot kept trying to move to its intended position. I managed to swap out the motor, but I thought that surely there's an improvement to be made here. Since educational robot arms seem to be mostly used to move colored cubes around, I think it's OK to scale them down and simplify them to make them less expensive and more accessible. I'd like to base my arm on brushless motors without any gearing, and use springs to balance it, just like the Anglepoise lamp . Then it's nimble and can't damage anything. It won't be able to lift much, but that's OK! I only want it to lift a paint brush or a colored cube.","title":"A bit of history"},{"location":"final-project/ideas.html#outline-of-the-idea","text":"Since I have a history of frying robot arms, I have a few personal design goals for my arm. Cannot hurt itself or the user Easily programmed by moving the arm and saving keypoints and paths Small Cheap Precise enough to paint candles The last point warrants a little explanation. Kertah\u00fasi\u00f0 in \u00cdsafj\u00f6r\u00f0ur manufacture candles in the shape of known buildings in the town and now around the country. It was started by a couple who were on parental leave but the money wasn't enough to live on. So they came to \u00de\u00f3rarinn at Fab Lab \u00cdsafj\u00f6r\u00f0ur and he helped them mold and cast the first candle houses. He was able to do this because of his experience at the Fab Academy. This was only last year, but their product offering now counts dozens of different designs. 
Every time they make a new design, the model is 3D printed here at Fab Lab \u00cdsafj\u00f6r\u00f0ur and S\u00e6d\u00eds and Gunnar make a silicone mold in their workshop. It's been a great collaboration. But hand painting the candles is a lot of work, so I wonder if it's possible to make a small, inexpensive and easy to use robot arm to paint the candles for them.","title":"Outline of the idea"},{"location":"final-project/ideas.html#idea-2-wake-up-mask","text":"I live in \u00cdsafj\u00f6r\u00f0ur, and that means that when writing this, I have not seen the sun for two months. Last fall I found that waking up with my younger son became significantly harder as the sun came out later in the day. Then the sun disappeared altogether. The Seasonal Affective Disorder acronym seems apt: the mood is SAD. I want to get more light in the dark winter months, especially to wake me up naturally in the morning. I absolutely love the artificial sun by DIY Perks and I want to make one, but I'm not sure where to put it. Maybe in a big floor lamp similar to the Arco , but then the liquid coolant would need to be piped all the way between the stand and the light. I'll keep that one in my little black book of ideas for now. Since my wife feeds the baby during the night, I take him upstairs when he wakes up early in the morning. That way we can share the sleeplessness. But this means that I can't use a sunrise alarm clock, because I don't want to wake up my wife and my older boy. I wonder if a wake-up mask could work. I could take a sleeping mask and add a few LEDs that shine into my eyes for about ten minutes before my alarm sounds. Use a WiFi enabled microcontroller to sync the sunrise to the alarm clock on my phone. 
That's probably the most difficult part, but it would make it easy to use.","title":"Idea 2: Wake-up mask"},{"location":"final-project/ideas.html#idea-3-ph-pal-for-the-french-press","text":"On my parental leave last fall, I wondered if I could use a pH meter to tell me exactly when to press down to make perfect French Press coffee every time. From experience (and reading online) I knew that with too short a brewing time the coffee becomes acidic and when you brew for too long, the coffee becomes bitter. So to get a baseline reference, every morning with my baby boy, I logged the quality of my morning cup.","title":"Idea 3: pH pal for the French Press"},{"location":"final-project/ideas.html#french-press-log-started-july-30-2022","text":"Acidic: Balanced: Bitter: (I made the tally marks in Inkscape, by the way.) I found that the coffee grounds get more acidic as they age. As I don't have a coffee grinder, I buy ground coffee in bags that last for three to four weeks. In this experiment I found that freshly ground coffee only seems to be delicious for as long as a loaf of bread is: one day. Two days max. Those days I got great cups of coffee. So the result was not the one I wanted: Freshly ground coffee is much more important than pressing down at the right time. I also found that pourover or Vietnamese drip is makes a much better brew out of stale coffee grounds. Quite nice, actually. So there goes that idea, but I did find a source of low cost, high quality pH sensors and amplifiers. Believe me, this is cheap for what you get. To measure pH with the precision that you need to tell bitter coffee (pH 5.10) from acidic coffee (pH 4.85), your sensor amplifier needs to be isolated and well designed. Continuing the train of thought about coffee pH I also had the brilliant idea of putting a little bit of baking soda (which is basic) into the cup to 'fix' the coffee when it turned out acidic. I made the worst cup of coffee in history. 
A few people have encouraged me to make a prototype of the pH pal. Maybe later! .md-content__button { display: none; }","title":"French press log started July 30 2022"},{"location":"final-project/integration.html","text":"System integration USB hub communication I was unsure how to connect all the robot joints together for a while. Jake Read shows I2C communication using his OSAP library. It would make for a nicely integrated whole, but the joints that have I2C connections might react more slowly than the one in the base, which would be directly connected to the full-speed USB port. So two weeks before the final presentation I ordered a tiny USB hub from a Swiss company called Yoctopuce. When it arrived I was eager to see if it worked and whether it was able to supply enough current to two motors at the same time. So I took my bike over to Hamraborg, bought a USB-C cable, cut it in half and soldered the wires to connectors that I ordered with the USB hub. And here I am running two BLDC motors from one USB port on my laptop (note that this is a special 3A port with a battery charging logo on it): Now the whole thing works on a breadboard. Whew! Power budget A bit later I realized that I hadn't checked whether that single USB-C port on my laptop could support all the motors and things that I wanted to build into the arm. So I bought another USB-C cable, cut it in half and connected more motors. And here I have three brushless motors and one stepper motor running on one USB-C port at the same time: Instead of the stepper motor I'll actually be using a tiny DC motor as a Z-axis, and I also need to power an end effector. Assembling one robot joint The night before my final project presentation, I assembled one robot joint and managed to hide all the wires. 
Here it is running a PID cascade control loop using the SimpleFOC library: .md-content__button { display: none; }","title":"System integration"},{"location":"final-project/integration.html#system-integration","text":"","title":"System integration   "},{"location":"final-project/integration.html#usb-hub-communication","text":"I was unsure how to connect all the robot joints together for a while. Jake Read shows I2C communication using his OSAP library. It would make for a nicely integrated whole, but the joints that have I2C connections might react more slowly than the one in the base, which would be directly connected to the full-speed USB port. So two weeks before the final presentation I ordered a tiny USB hub from a Swiss company called Yoctopuce. When it arrived I was eager to see if it worked and whether it was able to supply enough current to two motors at the same time. So I took my bike over to Hamraborg, bought a USB-C cable, cut it in half and soldered the wires to connectors that I ordered with the USB hub. And here I am running two BLDC motors from one USB port on my laptop (note that this is a special 3A port with a battery charging logo on it): Now the whole thing works on a breadboard. Whew!","title":"USB hub communication"},{"location":"final-project/integration.html#power-budget","text":"A bit later I realized that I hadn't checked whether that single USB-C port on my laptop could support all the motors and things that I wanted to build into the arm. So I bought another USB-C cable, cut it in half and connected more motors. 
And here I have three brushless motors and one stepper motor running on one USB-C port at the same time: Instead of the stepper motor I'll actually be using a tiny DC motor as a Z-axis, and I also need to power an end effector.","title":"Power budget"},{"location":"final-project/integration.html#assembling-one-robot-joint","text":"The night before my final project presentation, I assembled one robot joint and managed to hide all the wires. Here it is running a PID cascade control loop using the SimpleFOC library: .md-content__button { display: none; }","title":"Assembling one robot joint"},{"location":"final-project/interface.html","text":"Interface My own thing I tried the PhysicalPixel example that is built into the Arduino IDE and modified it to send commands to the motor. In the video below I am controlling the motors with Python through two serial ports at the same time. The left motor rotates twice and the right motor rotates once. I found a fantastic example of using WebSerial in Py-Script and tried to run it locally. I got the error: Access to Image from origin 'null' has been blocked by CORS policy I found that this is because the code can only be run on a server. I tried to set up a local server using web.py but that didn't work and then I tried uploading the example to my website but the MkDocs system dindn't allow me to access the page. Then I found this tutorial on how to deploy a Py-Script app to GitHub Pages. That was easy, it's just a matter of creating a repository, uploading the files and going into settings and enabling deployment: The repository itself is under the Code tab. Go into the Settings tab, select Pages in the left menu and under Build and Deployment select main , root and click Save. As long as the repository has a file called index.html, the site is now live! 
Here's a link to the two motor control interface: baks Here I'm connecting to a COM device and sending it the command T12, which means 'Turn by 12 radians', or just about two rotations: It works! I couldn't be more excited! I'm getting close to having the full stack of technologies working. And the interface is live on the web! I made some very rough modifications to the code and managed to connect to two COM ports and control two motors: Modular Things When I had the rgbb board and the stepper board working, I managed to make them interact with the following code (I named the stepper after myself): Svavar . setCurrentScale ( 0.3 ); Svavar . setVelocity ( 200 ); Svavar . setAccel ( 40 ); var val = 0 ; loop ( async () => { val = await led . getButtonState ( 0 ); console . log ( val ); led . setRGB ( val , val , val ); let pos = 0 ; if ( val == true ){ for ( let i = 0 ; i < 2 ; i ++ ) { pos = pos == 0 ? 5 : 0 ; await Svavar . absolute ( pos ); } } }, 50 ); .md-content__button { display: none; }","title":"Interface"},{"location":"final-project/interface.html#interface","text":"","title":"Interface   "},{"location":"final-project/interface.html#my-own-thing","text":"I tried the PhysicalPixel example that is built into the Arduino IDE and modified it to send commands to the motor. In the video below I am controlling the motors with Python through two serial ports at the same time. The left motor rotates twice and the right motor rotates once. I found a fantastic example of using WebSerial in Py-Script and tried to run it locally. I got the error: Access to Image from origin 'null' has been blocked by CORS policy I found that this is because the code can only be run on a server. I tried to set up a local server using web.py but that didn't work and then I tried uploading the example to my website but the MkDocs system dindn't allow me to access the page. Then I found this tutorial on how to deploy a Py-Script app to GitHub Pages. 
That was easy, it's just a matter of creating a repository, uploading the files and going into settings and enabling deployment: The repository itself is under the Code tab. Go into the Settings tab, select Pages in the left menu and under Build and Deployment select main , root and click Save. As long as the repository has a file called index.html, the site is now live! Here's a link to the two motor control interface: baks Here I'm connecting to a COM device and sending it the command T12, which means 'Turn by 12 radians', or just about two rotations: It works! I couldn't be more excited! I'm getting close to having the full stack of technologies working. And the interface is live on the web! I made some very rough modifications to the code and managed to connect to two COM ports and control two motors:","title":"My own thing"},{"location":"final-project/interface.html#modular-things","text":"When I had the rgbb board and the stepper board working, I managed to make them interact with the following code (I named the stepper after myself): Svavar . setCurrentScale ( 0.3 ); Svavar . setVelocity ( 200 ); Svavar . setAccel ( 40 ); var val = 0 ; loop ( async () => { val = await led . getButtonState ( 0 ); console . log ( val ); led . setRGB ( val , val , val ); let pos = 0 ; if ( val == true ){ for ( let i = 0 ; i < 2 ; i ++ ) { pos = pos == 0 ? 5 : 0 ; await Svavar . absolute ( pos ); } } }, 50 ); .md-content__button { display: none; }","title":"Modular Things"},{"location":"final-project/motor_control.html","text":"Motor Control Here I'm giving the motor commands from the Arduino serial monitor. I'm using the SimpleFOC Commander interface, which is a set of commands similar to G-code. .md-content__button { display: none; }","title":"Motor Control"},{"location":"final-project/motor_control.html#motor-control","text":"Here I'm giving the motor commands from the Arduino serial monitor. 
I'm using the SimpleFOC Commander interface, which is a set of commands similar to G-code. .md-content__button { display: none; }","title":"Motor Control   "},{"location":"final-project/presentation.html","text":"Final Project Presentation Presentation slide. 1 minute presentation video. Here I am presenting my final project to Professor Neil Gershenfeld and the instructors and students in the 2023 Fab Academy cycle. Link to the video. .md-content__button { display: none; }","title":"Presentation"},{"location":"final-project/presentation.html#final-project-presentation","text":"Presentation slide. 1 minute presentation video. Here I am presenting my final project to Professor Neil Gershenfeld and the instructors and students in the 2023 Fab Academy cycle. Link to the video. .md-content__button { display: none; }","title":"Final Project Presentation   "}]}
\ No newline at end of file
+{"config":{"indexing":"full","lang":["en"],"min_search_length":3,"prebuild_index":false,"separator":"[\\s\\-]+"},"docs":[{"location":"index.html","text":"Svavar's Fab Academy Journey Me My name is Svavar Konr\u00e1\u00f0sson . Welcome to my home on the web. Here I will document my studies at the Fab Academy , class 2023 . I have the good fortune to work at Fab Lab \u00cdsafj\u00f6r\u00f0ur , Iceland. My instructor and the head of Fab Lab \u00cdsafj\u00f6r\u00f0ur (and meme grand master) is \u00de\u00f3rarinn Bjartur Brei\u00f0fj\u00f6r\u00f0 Gunnarsson . I plan to pester him day and night for the next six months when my gadgets aren't working. What is the Fab Academy? How can I describe the Fab Academy? It's like getting an acceptance letter into Hogwarts. A vast world opens up to you full of outstanding people who, working together, can make (almost) anything. You have the support of a group of unassuming people around the world who are actually technology wizards, but also appreciate the human element in art and design. It's the only online school where I've actually gotten to know the people taking part in it. Already I've made a contribution to a technical project in Spain and I'm only just getting started in the Fab Lab network. I've made friends, laughed and cried and yet the Academy somehow only exists inside my laptop and I struggle to convey how significant this thing is to outsiders. Currently there are 2500 places like this in the world where I can now walk in and talk the same language as the people there and share stories of struggling to make things work. And the number of Fab Labs doubles every 18 months. Isn't that wild? Something's going on here. Team FMCU . See my contribution here . The Fab Academy has existed in a side reality for a long time, but it's invisible to Muggles. They have their peculiar speech and conventions that are impenetrable to outsiders. 
You Google the Fab Academy and find a cryptic website full of bare HTML links to technical resources without any explanations. Ah, my friend, but you lack the guidance and the context! Come on in. Check out the meme channel in the Mattermost chat. \u00de\u00f3rarinn, the undisputed Fab Academy meme master. See here . Image from Adri\u00e1n Torres. An introduction to modern technology as we know it might realistically be taken on in a ten-year period, but we rush through it in six months. You'd better keep up, because you need to make microcontroller boards that talk to each other- wait, no, now you must mill a 3D shape and make a casting- hold on, now it's time for web programming, but lay that to one side because now you need to stay up all night to finish your final project and present it to a hundred faces on your laptop screen that are in all time zones and climates and circumstances in the world and you are now a part of this group, which shares a traumat- I mean a transformative experience. Two out of three Icelanders taking the Fab Academy this year got sick right after their final project presentation, because the pressure had been so high. Was it worth it? Absolutely. I would hardly dare make electronics before but now I'm considering taking an electronic product to market. There is a before and an after the Fab Academy. I now know how an aluminum profile feels. You know, they don't heat the material that much, they just push it with enormous force until it deforms and takes on the shape of the die. Before I was a disorganized scatterbrain. Now I talk about designing things in spirals and documenting everything, not because I'm being forced to do it anymore but because I enjoy the process. What is happening to me? My rune This is the rune that I made for myself in grade school, made up of my initials SK. To start my Fab Academy journey, I made it digital, using Inkscape. My desk The image shows my desk when all the components for the Fab Academy arrived. 
Yay! Outside the window you can see the harbor and the mountain. In the window next to the headphones is the second thing I ever 3D printed, the Stormtrooper Buddha . My desk setup consists of three screens: Tandy 102 On the left I have a Tandy 102 laptop from 1985 that I use for writing, because it has the best keyboard I've ever used by far. This was my father's computer. He ordered a device called NADSBox that plugs into its RS232 port and allows me to put my writings onto an SD card. They're in the old .DO format (this came before .DOC, which came before .DOCX). Then I run a little converter program and voil\u00e1! I can put the text on the web. I use this computer in memory of my father and I also light a candle on my desk every day to keep him with me. ThinkPad In the middle is my ThinkPad laptop, which can become totally flat! So I milled a simple stand out of two pieces of birch plywood and lasered our logo on it. I use the laptop screen mostly for modeling and image editing. It's color calibrated. I also use it to connect to a few Raspberry Pi computers using a VNC Viewer cloud connection. Vertical monitor On the right is a vertical monitor that I use for coding, reading documents and browsing the internet. Missing from the picture is a 10 year old iPad which works great. I mainly use it to read and annotate PDF documents now, but I used it as my work computer at university for a while. I even wrote and submitted a grant application using only the on-screen keyboard! .md-content__button { display: none; }","title":"Home"},{"location":"index.html#svavars-fab-academy-journey","text":"","title":"Svavar's Fab Academy Journey   "},{"location":"index.html#me","text":"My name is Svavar Konr\u00e1\u00f0sson . Welcome to my home on the web. Here I will document my studies at the Fab Academy , class 2023 . I have the good fortune to work at Fab Lab \u00cdsafj\u00f6r\u00f0ur , Iceland. 
My instructor and the head of Fab Lab \u00cdsafj\u00f6r\u00f0ur (and meme grand master) is \u00de\u00f3rarinn Bjartur Brei\u00f0fj\u00f6r\u00f0 Gunnarsson . I plan to pester him day and night for the next six months when my gadgets aren't working.","title":"Me"},{"location":"index.html#what-is-the-fab-academy","text":"How can I describe the Fab Academy? It's like getting an acceptance letter into Hogwarts. A vast world opens up to you full of outstanding people who, working together, can make (almost) anything. You have the support of a group of unassuming people around the world who are actually technology wizards, but also appreciate the human element in art and design. It's the only online school where I've actually gotten to know the people taking part in it. Already I've made a contribution to a technical project in Spain and I'm only just getting started in the Fab Lab network. I've made friends, laughed and cried and yet the Academy somehow only exists inside my laptop and I struggle to convey how significant this thing is to outsiders. Currently there are 2500 places like this in the world where I can now walk in and talk the same language as the people there and share stories of struggling to make things work. And the number of Fab Labs doubles every 18 months. Isn't that wild? Something's going on here. Team FMCU . See my contribution here . The Fab Academy has existed in a side reality for a long time, but it's invisible to Muggles. They have their peculiar speech and conventions that are impenetrable to outsiders. You Google the Fab Academy and find a cryptic website full of bare HTML links to technical resources without any explanations. Ah, my friend, but you lack the guidance and the context! Come on in. Check out the meme channel in the Mattermost chat. \u00de\u00f3rarinn, the undisputed Fab Academy meme master. See here . Image from Adri\u00e1n Torres. 
An introduction to modern technology as we know it might realistically be taken on in a ten-year period, but we rush through it in six months. You'd better keep up, because you need to make microcontroller boards that talk to each other- wait, no, now you must mill a 3D shape and make a casting- hold on, now it's time for web programming, but lay that to one side because now you need to stay up all night to finish your final project and present it to a hundred faces on your laptop screen that are in all time zones and climates and circumstances in the world and you are now a part of this group, which shares a traumat- I mean a transformative experience. Two out of three Icelanders taking the Fab Academy this year got sick right after their final project presentation, because the pressure had been so high. Was it worth it? Absolutely. I would hardly dare make electronics before but now I'm considering taking an electronic product to market. There is a before and an after the Fab Academy. I now know how an aluminum profile feels. You know, they don't heat the material that much, they just push it with enormous force until it deforms and takes on the shape of the die. Before I was a disorganized scatterbrain. Now I talk about designing things in spirals and documenting everything, not because I'm being forced to do it anymore but because I enjoy the process. What is happening to me?","title":"What is the Fab Academy?"},{"location":"index.html#my-rune","text":"This is the rune that I made for myself in grade school, made up of my initials SK. To start my Fab Academy journey, I made it digital, using Inkscape.","title":"My rune"},{"location":"index.html#my-desk","text":"The image shows my desk when all the components for the Fab Academy arrived. Yay! Outside the window you can see the harbor and the mountain. In the window next to the headphones is the second thing I ever 3D printed, the Stormtrooper Buddha . 
My desk setup consists of three screens:","title":"My desk"},{"location":"index.html#tandy-102","text":"On the left I have a Tandy 102 laptop from 1985 that I use for writing, because it has the best keyboard I've ever used by far. This was my father's computer. He ordered a device called NADSBox that plugs into its RS232 port and allows me to put my writings onto an SD card. They're in the old .DO format (this came before .DOC, which came before .DOCX). Then I run a little converter program and voil\u00e1! I can put the text on the web. I use this computer in memory of my father and I also light a candle on my desk every day to keep him with me.","title":"Tandy 102"},{"location":"index.html#thinkpad","text":"In the middle is my ThinkPad laptop, which can become totally flat! So I milled a simple stand out of two pieces of birch plywood and lasered our logo on it. I use the laptop screen mostly for modeling and image editing. It's color calibrated. I also use it to connect to a few Raspberry Pi computers using a VNC Viewer cloud connection.","title":"ThinkPad"},{"location":"index.html#vertical-monitor","text":"On the right is a vertical monitor that I use for coding, reading documents and browsing the internet. Missing from the picture is a 10 year old iPad which works great. I mainly use it to read and annotate PDF documents now, but I used it as my work computer at university for a while. I even wrote and submitted a grant application using only the on-screen keyboard! .md-content__button { display: none; }","title":"Vertical monitor"},{"location":"about.html","text":"About me Article in Icelandic about my work and studies I'm Svavar Konr\u00e1\u00f0sson. I have a BSc in Mechanical Engineering. The only thing I have left in my Master's degree is the thesis. I'll do that after the Fab Academy :) Here's a video where I tell Neil Gershenfeld a little bit about myself during random review. 
I took part in starting Team Spark , the Icelandic Formula Student team, which designs and fabricates an electric racing car and competes against other university teams at Silverstone circuit in England every year. I did the structural design of an innovative RIB at Rafnar boatyard , a boat that is now manufactured in five countries around the world. I started a company around the design of a simple and inexpensive suspension seat for high-speed boats. I also started the first proper 3D printing service in Iceland. I've started several promising things. My problem has been bad focus and project management and letting the scope of my projects grow until it's impossible to finish them. Therefore I suspect that Project Management will prove to be the most important part of Fab Academy for me. Now I work at Fab Lab \u00cdsafj\u00f6r\u00f0ur, Iceland and I'm interested in ways to introduce students to computer-controlled machines. I want 2023 to be my year of finishing things. Note I still need to add more details to the bio. I need to figure out the way to copy text documents from my father's antiquated Tandy 102 portable computer, on which I wrote more text about myself. 
Fab Academy Student Agreement The Fab Academy is responsible for: Teaching principles and practices of digital fabrication Arranging lectures, recitations, meetings, and events for the class Evaluating and providing feedback on student work Offering clear standards for completing assignments Certifying and archiving student progress Supervising class preparation Reviewing prospective students, instructors, and labs Providing central staff and infrastructure for students, instructors, and labs Fund-raising for costs not covered by student tuition Managing and reporting on the program's finances, results, and impacts Publicizing the program Promoting a respectful environment free of harassment and discrimination Encourage a diverse, accessible, and equitable community I am a Fab Academy student, responsible for: Attending class lectures and participating in reviews Developing and documenting projects assigned to introduce and demonstrate skills Allowing the Fab Academy to share my work (with attribution) in the class for purposes compatible with its mission Honestly reporting on my work, and appropriately attributing the work of others (both human and machine) Working safely Leaving workspaces in the same (or better) condition than I found them Participating in the upkeep of my lab Ensuring that my tuition for local and central class costs is covered Following locally applicable health and safety guidance Promoting a respectful environment free of harassment and discrimination Signed by committing this file in my repository, Svavar Konr\u00e1\u00f0sson .md-content__button { display: none; }","title":"About Me"},{"location":"about.html#about-me","text":"Article in Icelandic about my work and studies","title":"About me   "},{"location":"about.html#im-svavar-konrasson","text":"I have a BSc in Mechanical Engineering. The only thing I have left in my Master's degree is the thesis. 
I'll do that after the Fab Academy :) Here's a video where I tell Neil Gershenfeld a little bit about myself during random review. I took part in starting Team Spark , the Icelandic Formula Student team, which designs and fabricates an electric racing car and competes against other university teams at Silverstone circuit in England every year. I did the structural design of an innovative RIB at Rafnar boatyard , a boat that is now manufactured in five countries around the world. I started a company around the design of a simple and inexpensive suspension seat for high-speed boats. I also started the first proper 3D printing service in Iceland. I've started several promising things. My problem has been bad focus and project management and letting the scope of my projects grow until it's impossible to finish them. Therefore I suspect that Project Management will prove to be the most important part of Fab Academy for me. Now I work at Fab Lab \u00cdsafj\u00f6r\u00f0ur, Iceland and I'm interested in ways to introduce students to computer-controlled machines. I want 2023 to be my year of finishing things. Note I still need to add more details to the bio. 
I need to figure out the way to copy text documents from my father's antiquated Tandy 102 portable computer, on which I wrote more text about myself.","title":"I'm Svavar Konr\u00e1\u00f0sson."},{"location":"about.html#fab-academy-student-agreement","text":"The Fab Academy is responsible for: Teaching principles and practices of digital fabrication Arranging lectures, recitations, meetings, and events for the class Evaluating and providing feedback on student work Offering clear standards for completing assignments Certifying and archiving student progress Supervising class preparation Reviewing prospective students, instructors, and labs Providing central staff and infrastructure for students, instructors, and labs Fund-raising for costs not covered by student tuition Managing and reporting on the program's finances, results, and impacts Publicizing the program Promoting a respectful environment free of harassment and discrimination Encourage a diverse, accessible, and equitable community I am a Fab Academy student, responsible for: Attending class lectures and participating in reviews Developing and documenting projects assigned to introduce and demonstrate skills Allowing the Fab Academy to share my work (with attribution) in the class for purposes compatible with its mission Honestly reporting on my work, and appropriately attributing the work of others (both human and machine) Working safely Leaving workspaces in the same (or better) condition than I found them Participating in the upkeep of my lab Ensuring that my tuition for local and central class costs is covered Following locally applicable health and safety guidance Promoting a respectful environment free of harassment and discrimination Signed by committing this file in my repository, Svavar Konr\u00e1\u00f0sson .md-content__button { display: none; }","title":"Fab Academy Student Agreement"},{"location":"assignments/week01.html","text":"Principles and Practices Final project sketch Here I am describing 
my final project idea to Neil Gershenfeld and the students and instructors in Fab Academy cycle 2023. Link to the video. This week, I set up an ideas page with the three ideas that I have for a final project in the Fab Academy. Check them out, they're quite fun! I ended up picking the friendly little educational robot arm called baks. It's powered by brushless servomotors and has a structure made entirely of PCBs! So the robot consists of motors and PCBs and nothing else! I may not be able to get all the way there during the Fab Academy, but I do want to make that happen eventually. I put the Student Agreement on my About page. This is a document that I sign by committing it to my repository and it states the code of conduct that I will abide by during my Fab Academy studies. Setting up this website The rest of this page is dedicated to how I set up this nice website using Material for MkDocs . The website is hosted on Gitlab and deployed automatically using this file . But you can also easily deploy an MkDocs website to GitHub Pages. I did that before starting the Fab Academy, to get a little bit of a head start. You can check out the test site . I experimented with lots of different colors, but ended up finding black and white cleaner and more elegant. I also added a dark mode that you can toggle, but removed it again when it didn't work very well with the color scheme I ended up with. I really liked trying all the features in Material for MkDocs, it's a really well designed package and superbly documented. In a video call a few months before the Fab Academy started, \u00c1rni Bj\u00f6rnsson showed M\u00f3ses and me how to set up MkDocs and Git. I've summarized all the steps here, with a few extra resources I found along the way: MkDocs setup from scratch First, I installed the VSCode editor. 
Then, as \u00c1rni Bj\u00f6rnsson suggested, I created a folder called code directly in my C: drive, so that Dropbox and OneDrive don't try to sync the folder and mess up my GitHub connection. I followed this tutorial to set up MkDocs. There are a few steps to it: It starts with installing the Python extension for VSCode . The pip package manager is included with the Python extension, but for some reason you don't always get the latest version. It may be a good idea to check the version. I opened a new terminal in the top menu in VSCode ( Terminal -> New Terminal ) and typed pip --version pip 22.3.1 from C:\\Users\\your_username\\AppData\\Local\\Programs\\Python\\Python310\\lib\\site-packages\\pip (python 3.10) and if it says it's out of date you can upgrade pip like this: pip install --upgrade pip Now it was time to install MkDocs: pip install mkdocs Then, as the tutorial suggests, I typed mkdocs --version to see if the installation went OK: mkdocs --version mkdocs, version 1.2.0 from /usr/local/lib/python3.8/site-packages/mkdocs (Python 3.8) (optional) I also installed the Material theme because it seemed nice and it includes expandable code annotations: pip install mkdocs-material Note To enable notes like this one in MkDocs, I added Admonitions to the extensions in the mkdocs.yml file: markdown_extensions : admonition Then, to create a note, start with !!! note and then indent the note text: !!! note Note text MkDocs test drive I followed this tutorial to set up a small test website and get a live preview. After installing Material for MkDocs, I made a folder called Mkdocs. Then I opened a terminal and made sure that it was in the right folder: cd C:\\code\\Mkdocs Then I simply typed mkdocs new . and that was enough to create a simple site! Well, there are a few extra steps to view the site and deploy it, but this whole process is very simple. 
Then I added the following lines to mkdocs.yml: theme : name : material This is geared towards the Material theme for MkDocs, so if you're using a different theme, the only thing you need to change is the the theme line in the mkdocs.yml file. Set up autocomplete. The tutorial suggests adding a line to settings.json, but it doesn't mention where that file is in VSCode. But it does provide a link to instructions. You go into File -> Preferences -> Settings , scroll all the way down to Yaml: Schemas, and click Edit in settings.json . Then you add the line \"https://squidfunk.github.io/mkdocs-material/schema.json\": \"mkdocs.yml\" , so in the end it looks like this: { \"workbench.colorTheme\" : \"Default Dark+\" , \"files.autoSave\" : \"afterDelay\" , \"yaml.schemas\" : { \"https://squidfunk.github.io/mkdocs-material/schema.json\" : \"mkdocs.yml\" } } OK, now we're yearning for something to happen. Type the following into the terminal: mkdocs serve Now open your browser and write localhost:8000 in the address bar. Voil\u00e1! We have a live preview for an MkDocs website! Material theme Default theme The source code for this site, written in Markdown, looks like this: # Welcome to MkDocs For full documentation visit [mkdocs.org](https://www.mkdocs.org). ## Commands * `mkdocs new [dir-name]` - Create a new project. * `mkdocs serve` - Start the live-reloading docs server. * `mkdocs build` - Build the documentation site. * `mkdocs -h` - Print help message and exit. ## Project layout mkdocs.yml # The configuration file. docs/ index.md # The documentation homepage. ... # Other markdown pages, images and other files. As you can see, Markdown is simple and readable. Writing # gives you the biggest heading, ## gives you heading 2, and so on. Put * in front of text to make a bullet point. To add a link, you do this: [mkdocs.org](https://www.mkdocs.org) and to add an image you do the same, but with an exclamation mark: ![Material theme](./images/material.png) 5. 
Finally, to build a static site, write this in the terminal: mkdocs build I tried this and got an index page that works fine. But when I clicked the Setup page (this page) I got this: Hm. Apparently you need to put the structure of the site into your mkdocs.yml file to explicitly state the site navigation. So I opened it up and added nav: - 'index.md' - 'code.md' # The code page is just a few code block tests in different languages. - 'setup.md' No, that didn't work either. After some looking around I found a solution . I added the following line to mkdocs.yml : use_directory_urls: false It works! And the first solution is unnecessary; MkDocs will infer the site navigation based on the pages you create in the docs folder. Setting up MkDocs and getting the live preview working took me an hour in the morning. Writing up how I did it took me the rest of the day. Writing this documentation was a great way to learn Markdown. I like Markdown, with one exception; I don't have the ` symbol on my keyboard. I need to use this symbol quite a lot for code snippets. I did a bit of searching and found that the shortcut Alt-96 is bound to the ` symbol. Now I use that shortcut all the time. And this page serves as my Markdown reference, when I start to forget how to format things. Note To enable code highlighting (coloring the code, similar to how it looks in the VSCode editor), I added the following lines to mkdocs.yml : markdown_extensions: - pymdownx.highlight: anchor_linenums: true - pymdownx.inlinehilite - pymdownx.snippets - pymdownx.superfences - pymdownx.details I'm not sure if all these lines are necessary, but I'm just following this tutorial . The last line comes from somewhere else, I don't remember where. Anyway, now I can make a code block by enclosing the code with ``` at the top and bottom and including the name of the language at the top: ```python # This program prints Hello, world! print('Hello, world!') ``` This results in: # This program prints Hello, world! 
print ( 'Hello, world!' ) You can use this reference to write the name of the programming language correctly at the top of your code block. One more thing, I also added markdown_extensions : - attr_list - md_in_html to add the ability to align images, add captions and mark large images for lazy loading, as per this tutorial . Customizing the theme There is a way to toggle between light and dark mode. This tutorial says that it's enough to copy the following code into mkdocs.yml : Light/dark toggle Light/dark toggle + Color change theme : palette : # Palette toggle for light mode - scheme : default toggle : icon : material/brightness-7 name : Switch to dark mode # Palette toggle for dark mode - scheme : slate toggle : icon : material/brightness-4 name : Switch to light mode theme : palette : # Palette toggle for light mode - scheme : default toggle : icon : material/brightness-7 name : Switch to dark mode primary : red accent : red # Palette toggle for dark mode - scheme : slate toggle : icon : material/brightness-4 name : Switch to light mode primary : red accent : red Source code for the content tabs above === \"Light/dark toggle\" ``` yaml theme: palette: # Palette toggle for light mode - scheme: default toggle: icon: material/brightness-7 name: Switch to dark mode # Palette toggle for dark mode - scheme: slate toggle: icon: material/brightness-4 name: Switch to light mode ``` === \"Light/dark toggle + Color change\" ``` yaml palette: # Palette toggle for light mode - scheme: default toggle: icon: material/brightness-7 name: Switch to dark mode primary: red accent: red # Palette toggle for dark mode - scheme: slate toggle: icon: material/brightness-4 name: Switch to light mode primary: red accent: red ``` Note How to enable content tabs in mkdocs.yaml : markdown_extensions : - pymdownx.superfences - pymdownx.tabbed : alternate_style : true I also discovered that if you select a command, right click and select Change All Occurrences in VSCode, you only need to 
write the new color once, instead of four times. Nice! Finally, I made a logo in Inkscape. I designed this logo in grade school, it's a kind of Icelandic rune that combines my initials S and K. Then I added two lines to mkdocs.yml to change the logo in the top left corner and also the favicon (the icon you see in the browser tab). theme : logo : images\\SK_logo.svg favicon : images\\SK_logo.svg First I wrote the path as /images/SK_logo.SVG and VSCode complained about the formatting. I found that you can right click the image and select Copy Relative Path to get the right formatting. That gave me docs\\images\\SK_logo.svg , which didn't work, but when I changed it to images\\SK_logo.svg it worked. I also enabled two navigation options: theme : features : - navigation.instant # Instant loading (page behaves like a single-page application, search persists between pages) - navigation.tabs # The pages are visible as tabs at the top instead of on the left hand side. Page source The Markdown code for the View page Source button is like this: [View page source](setup.txt){ .md-button } I'm going to put it on every page of my documentation. If you see an interesting element in the page, you can then easily see how to set it up. Pointing to a .md file doesn't work, so my workaround is to make a copy of the Markdown source file and change its extension to .txt . I made a Python script using this tutorial and put it in the MkDocs folder. The script copies all the .md files in the docs folder to another folder called textfiles and converts their extension to .txt. The View Page Source button at the bottom of each page links to its respective .txt file. The Python code has some bug, so that it only converts code.md to code.txt, but I'm happy that I was able to get that far. To change the theme for just the home page, I followed tmeuze's advice in this issue . To mkdocs.yml I added custom_dir : docs/overrides and created a docs/overrides folder. 
Then I was unsure how to set up a custom theme, so I stopped there. To enable the Github Repository button in the top right corner, I followed this example and added the following to my mkdocs.yml , just below site_name : repo_name : Github Repository repo_url : https://github.com/svavarkonn/MkDocs I added theme : features : - navigation.tracking so that the URL in the address bar changes as you scroll down the page. If you copy the URL, the page will open in the section where you were when you copied it. Might be convenient if someone wants to link to something on this site. I also added theme : features : - navigation.tabs - navigation.tabs.sticky to make the top navigation follow you as you scroll down the page. By default, an \"Edit this page\" symbol is added to the right of the headline of every page. When you click it you just get a 404 error. I followed this to remove the edit button. I just add the CSS code < style > . md-content__button { display : none ; } </ style > to the .md file of each page and voil\u00e1! The edit button disappears. To enable icons and emojis, I followed the Material for MkDocs documentation on icons and emojis and added the following to mkdocs.yml : markdown_extensions: - attr_list - pymdownx.emoji: emoji_index: !!python/name:materialx.emoji.twemoji emoji_generator: !!python/name:materialx.emoji.to_svg Now I can make faces :smile: (Hmm, apparently this isn't working anymore.) To enable keyboard keys like Ctrl + Alt + Del , I added the following to mkdocs.yml : markdown_extensions : - pymdownx.keys Now I can add keyboard keys into the text by enclosing the expression with ++, and using one + between keys. The buttons above are made by typing ++ctrl+alt+del++ . Here is the key reference. There is no way to make image captions in Markdown. This seems like a glaring omission. I used this method of putting the caption in the next line after the image tag and enclosing the caption with **. 
Like this: ![ Git discussion ]( images/hategit.PNG ) *Some sentiments about Git* The image and caption are displayed like this: Sentiments about Git The caption is inline with the image, which is not great, but the workflow is simple, so I'm keeping it. If the caption doesn't work, put it inline with the image tag. View page source I've stopped using the page source button above, which links to a text file that I need to update manually. I've instead added an icon next to the page title at the top. I got the icon from Iconify . The icon links to the page source in the Github repository. MathJax I installed MathJax by following the steps in the Material for MkDocs documentation . Now I can display beautiful equations on my website, using LaTex syntax. Git setup I cloned my Fab Academy repository on Gitlab to my computer with $ git clone https://gitlab.fabcloud.org/academany/fabacademy/2023/labs/isafjordur/students/svavar-konradsson.git Cloning into 'svavar-konradsson'... remote: Enumerating objects: 15, done. remote: Counting objects: 100% (15/15), done. remote: Compressing objects: 100% (14/14), done. remote: Total 15 (delta 1), reused 0 (delta 0), pack-reused 0 Receiving objects: 100% (15/15), 28.91 KiB | 7.23 MiB/s, done. Resolving deltas: 100% (1/1), done. Then I edited index.html a little bit and tried pushing the change to the online repo on Gitlab: git push warning: missing OAuth configuration for gitlab.fabcloud.org - see https://aka.ms/gcm/gitlab for more information remote: HTTP Basic: Access denied. The provided password or token is incorrect or your account has 2FA enabled and you must use a personal access token instead of a password. 
See https://gitlab.fabcloud.org/help/topics/git/troubleshooting_git#error-on-git-fetch-http-basic-access-denied fatal: Authentication failed for 'https://gitlab.fabcloud.org/academany/fabacademy/2023/labs/isafjordur/students/svavar-konradsson.git/' I looked up how to get an access token and my instructor \u00de\u00f3rarinn asked \u00c1rni at Fab Lab Akureyri if tokens are the thing to use. \u00c1rni recommended using an SSH key instead. I managed to generate an ssh key using this tutorial : $ ssh-keygen -t ed25519 -C \"generate an ssh key for gitlab to clone my repository\" Generating public/private ed25519 key pair. Enter file in which to save the key (/n//.ssh/id_ed25519): Enter passphrase (empty for no passphrase): Then I wasn't able to type anything as a passphrase. Then I found that I was able to type, but the Git Bash terminal just didn't show anything. I wrote a phrase twice and hit Enter. I got an SSH key. OK, I have an SSH key, but what do I do with it? And why? Why is this so complicated? I just want to upload some files to the internet. Then I found this tutorial on adding an SSH key to my Gitlab account and followed it blindly. I used $ cat ~/.ssh/id_ed25519.pub | clip to copy the contents of the SSH key file. Put it into the Gitlab account under profile -> SSH keys -> Add an SSH key. Then went into C:/code and said `git clone \"the thing I copied when I pressed clone in gitlab\"' Then I got the message: *** Please tell me who you are. Run git config --global user.email \"you@example.com\" git config --global user.name \"Your Name\" to set your account's default identity. I set the identity: PS C:\\code\\svavar-konradsson> git config --global user.email \"my@email.com\" PS C:\\code\\svavar-konradsson> git config --global user.name \"Svavar Konradsson\" and then said git clone That worked! I opened index.html, put my name into the title and saved. That appeared under Source control in Gitlab, I wrote a comment in a field and clicked Commit. 
Then a Sync button appeared and I pressed that and it pushed the site onto the online repo. Now I need to type my passphrase twice every time that I push files to the online repo. That's annoying, so I'm going to generate a new SSH key and skip the passphrase. I followed \u00c1rni Bj\u00f6rnsson documentation to generate an RSA key and put it into my Gitlab profile. Every time I made major changes in Windows Explorer; deleted lots of files and moved others, I needed to generate a new SSH key. The last one was ssh-keygen -t rsa -b 2048 cat ~/.ssh/id_rsa.pub | clip The MkDocs convention is to put the built web page into a folder called site, but Gitlab needs the web site to be in a folder called public. Finally I found the site_dir setting , which I can change in mkdocs.yml so that I can rename the site folder to public . At first I manually renamed site to public and pushed the files to the Gitlab repo. I got into trouble when the dot in front of the file .gitlab-ci.yml was erased somehow and the site wasn't deployed. My instructor \u00de\u00f3rarinn found the problem and after that the site worked. .md-content__button { display: none; }","title":"1. Principles and Practices"},{"location":"assignments/week01.html#principles-and-practices","text":"","title":"Principles and Practices   "},{"location":"assignments/week01.html#final-project-sketch","text":"Here I am describing my final project idea to Neil Gershenfeld and the students and instructors in Fab Academy cycle 2023. Link to the video. This week, I set up an ideas page with the three ideas that I have for a final project in the Fab Academy. Check them out, they're quite fun! I ended up picking the friendly little educational robot arm called baks. It's powered by brushless servomotors and has a structure made entirely of PCBs! So the robot consists of motors and PCBs and nothing else! I may not be able to get all the way there during the Fab Academy, but I do want to make that happen eventually. 
I put the Student Agreement on my About page. This is a document that I sign by committing it to my repository and it states the code of conduct that I will abide by during my Fab Academy studies.","title":"Final project sketch"},{"location":"assignments/week01.html#setting-up-this-website","text":"The rest of this page is dedicated to how I set up this nice website using Material for MkDocs . The website is hosted on Gitlab and deployed automatically using this file . But you can also easily deploy an MkDocs website to GitHub Pages. I did that before starting the Fab Academy, to get a little bit of a head start. You can check out the test site . I experimented with lots of different colors, but ended up finding black and white cleaner and more elegant. I also added a dark mode that you can toggle, but removed it again when it didn't work very well with the color scheme I ended up with. I really liked trying all the features in Material for MkDocs, it's a really well designed package and superbly documented. In a video call a few months before the Fab Academy started, \u00c1rni Bj\u00f6rnsson showed M\u00f3ses and me how to set up MkDocs and Git. I've summarized all the steps here, with a few extra resources I found along the way:","title":"Setting up this website"},{"location":"assignments/week01.html#mkdocs-setup-from-scratch","text":"First, I installed the VSCode editor. Then, as \u00c1rni Bj\u00f6rnsson suggested, I created a folder called code directly in my C: drive, so that Dropbox and OneDrive don't try to sync the folder and mess up my GitHub connection. I followed this tutorial to set up MkDocs. There are a few steps to it: It starts with installing the Python extension for VSCode . The pip package manager is included with the Python extension, but for some reason you don't always get the latest version. It may be a good idea to check the version. 
I opened a new terminal in the top menu in VSCode ( Terminal -> New Terminal ) and typed pip --version pip 22.3.1 from C:\\Users\\your_username\\AppData\\Local\\Programs\\Python\\Python310\\lib\\site-packages\\pip (python 3.10) and if it says it's out of date you can upgrade pip like this: pip install --upgrade pip Now it was time to install MkDocs: pip install mkdocs Then, as the tutorial suggests, I typed mkdocs --version to see if the installation went OK: mkdocs --version mkdocs, version 1.2.0 from /usr/local/lib/python3.8/site-packages/mkdocs (Python 3.8) (optional) I also installed the Material theme because it seemed nice and it includes expandable code annotations: pip install mkdocs-material Note To enable notes like this one in MkDocs, I added Admonitions to the extensions in the mkdocs.yml file: markdown_extensions : admonition Then, to create a note, start with !!! note and then indent the note text: !!! note Note text","title":"MkDocs setup from scratch"},{"location":"assignments/week01.html#mkdocs-test-drive","text":"I followed this tutorial to set up a small test website and get a live preview. After installing Material for MkDocs, I made a folder called Mkdocs. Then I opened a terminal and made sure that it was in the right folder: cd C:\\code\\Mkdocs Then I simply typed mkdocs new . and that was enough to create a simple site! Well, there are a few extra steps to view the site and deploy it, but this whole process is very simple. Then I added the following lines to mkdocs.yml: theme : name : material This is geared towards the Material theme for MkDocs, so if you're using a different theme, the only thing you need to change is the the theme line in the mkdocs.yml file. Set up autocomplete. The tutorial suggests adding a line to settings.json, but it doesn't mention where that file is in VSCode. But it does provide a link to instructions. 
You go into File -> Preferences -> Settings , scroll all the way down to Yaml: Schemas, and click Edit in settings.json . Then you add the line \"https://squidfunk.github.io/mkdocs-material/schema.json\": \"mkdocs.yml\" , so in the end it looks like this: { \"workbench.colorTheme\" : \"Default Dark+\" , \"files.autoSave\" : \"afterDelay\" , \"yaml.schemas\" : { \"https://squidfunk.github.io/mkdocs-material/schema.json\" : \"mkdocs.yml\" } } OK, now we're yearning for something to happen. Type the following into the terminal: mkdocs serve Now open your browser and write localhost:8000 in the address bar. Voil\u00e1! We have a live preview for an MkDocs website! Material theme Default theme The source code for this site, written in Markdown, looks like this: # Welcome to MkDocs For full documentation visit [mkdocs.org](https://www.mkdocs.org). ## Commands * `mkdocs new [dir-name]` - Create a new project. * `mkdocs serve` - Start the live-reloading docs server. * `mkdocs build` - Build the documentation site. * `mkdocs -h` - Print help message and exit. ## Project layout mkdocs.yml # The configuration file. docs/ index.md # The documentation homepage. ... # Other markdown pages, images and other files. As you can see, Markdown is simple and readable. Writing # gives you the biggest heading, ## gives you heading 2, and so on. Put * in front of text to make a bullet point. To add a link, you do this: [mkdocs.org](https://www.mkdocs.org) and to add an image you do the same, but with an exclamation mark: ![Material theme](./images/material.png) 5. Finally, to build a static site, write this in the terminal: mkdocs build I tried this and got an index page that works fine. But when I clicked the Setup page (this page) I got this: Hm. Apparently you need to put the structure of the site into your mkdocs.yml file to explicitly state the site navigation. 
So I opened it up and added nav: - 'index.md' - 'code.md' # The code page is just a few code block tests in different languages. - 'setup.md' No, that didn't work either. After some looking around I found a solution . I added the following line to mkdocs.yml : use_directory_urls: false It works! And the first solution is unnecessary; MkDocs will infer the site navigation based on the pages you create in the docs folder. Setting up MkDocs and getting the live preview working took me an hour in the morning. Writing up how I did it took me the rest of the day. Writing this documentation was a great way to learn Markdown. I like Markdown, with one exception; I don't have the ` symbol on my keyboard. I need to use this symbol quite a lot for code snippets. I did a bit of searching and found that the shortcut Alt-96 is bound to the ` symbol. Now I use that shortcut all the time. And this page serves as my Markdown reference, when I start to forget how to format things. Note To enable code highlighting (coloring the code, similar to how it looks in the VSCode editor), I added the following lines to mkdocs.yml : markdown_extensions: - pymdownx.highlight: anchor_linenums: true - pymdownx.inlinehilite - pymdownx.snippets - pymdownx.superfences - pymdownx.details I'm not sure if all these lines are necessary, but I'm just following this tutorial . The last line comes from somewhere else, I don't remember where. Anyway, now I can make a code block by enclosing the code with ``` at the top and bottom and including the name of the language at the top: ```python # This program prints Hello, world! print('Hello, world!') ``` This results in: # This program prints Hello, world! print ( 'Hello, world!' ) You can use this reference to write the name of the programming language correctly at the top of your code block. 
One more thing, I also added markdown_extensions : - attr_list - md_in_html to add the ability to align images, add captions and mark large images for lazy loading, as per this tutorial .","title":"MkDocs test drive"},{"location":"assignments/week01.html#customizing-the-theme","text":"There is a way to toggle between light and dark mode. This tutorial says that it's enough to copy the following code into mkdocs.yml : Light/dark toggle Light/dark toggle + Color change theme : palette : # Palette toggle for light mode - scheme : default toggle : icon : material/brightness-7 name : Switch to dark mode # Palette toggle for dark mode - scheme : slate toggle : icon : material/brightness-4 name : Switch to light mode theme : palette : # Palette toggle for light mode - scheme : default toggle : icon : material/brightness-7 name : Switch to dark mode primary : red accent : red # Palette toggle for dark mode - scheme : slate toggle : icon : material/brightness-4 name : Switch to light mode primary : red accent : red Source code for the content tabs above === \"Light/dark toggle\" ``` yaml theme: palette: # Palette toggle for light mode - scheme: default toggle: icon: material/brightness-7 name: Switch to dark mode # Palette toggle for dark mode - scheme: slate toggle: icon: material/brightness-4 name: Switch to light mode ``` === \"Light/dark toggle + Color change\" ``` yaml palette: # Palette toggle for light mode - scheme: default toggle: icon: material/brightness-7 name: Switch to dark mode primary: red accent: red # Palette toggle for dark mode - scheme: slate toggle: icon: material/brightness-4 name: Switch to light mode primary: red accent: red ``` Note How to enable content tabs in mkdocs.yaml : markdown_extensions : - pymdownx.superfences - pymdownx.tabbed : alternate_style : true I also discovered that if you select a command, right click and select Change All Occurrences in VSCode, you only need to write the new color once, instead of four times. Nice! 
Finally, I made a logo in Inkscape. I designed this logo in grade school, it's a kind of Icelandic rune that combines my initials S and K. Then I added two lines to mkdocs.yml to change the logo in the top left corner and also the favicon (the icon you see in the browser tab). theme : logo : images\\SK_logo.svg favicon : images\\SK_logo.svg First I wrote the path as /images/SK_logo.SVG and VSCode complained about the formatting. I found that you can right click the image and select Copy Relative Path to get the right formatting. That gave me docs\\images\\SK_logo.svg , which didn't work, but when I changed it to images\\SK_logo.svg it worked. I also enabled two navigation options: theme : features : - navigation.instant # Instant loading (page behaves like a single-page application, search persists between pages) - navigation.tabs # The pages are visible as tabs at the top instead of on the left hand side.","title":"Customizing the theme"},{"location":"assignments/week01.html#page-source","text":"The Markdown code for the View page Source button is like this: [View page source](setup.txt){ .md-button } I'm going to put it on every page of my documentation. If you see an interesting element in the page, you can then easily see how to set it up. Pointing to a .md file doesn't work, so my workaround is to make a copy of the Markdown source file and change its extension to .txt . I made a Python script using this tutorial and put it in the MkDocs folder. The script copies all the .md files in the docs folder to another folder called textfiles and converts their extension to .txt. The View Page Source button at the bottom of each page links to its respective .txt file. The Python code has some bug, so that it only converts code.md to code.txt, but I'm happy that I was able to get that far. To change the theme for just the home page, I followed tmeuze's advice in this issue . To mkdocs.yml I added custom_dir : docs/overrides and created a docs/overrides folder. 
Then I was unsure how to set up a custom theme, so I stopped there. To enable the Github Repository button in the top right corner, I followed this example and added the following to my mkdocs.yml , just below site_name : repo_name : Github Repository repo_url : https://github.com/svavarkonn/MkDocs I added theme : features : - navigation.tracking so that the URL in the address bar changes as you scroll down the page. If you copy the URL, the page will open in the section where you were when you copied it. Might be convenient if someone wants to link to something on this site. I also added theme : features : - navigation.tabs - navigation.tabs.sticky to make the top navigation follow you as you scroll down the page. By default, an \"Edit this page\" symbol is added to the right of the headline of every page. When you click it you just get a 404 error. I followed this to remove the edit button. I just add the CSS code < style > . md-content__button { display : none ; } </ style > to the .md file of each page and voil\u00e1! The edit button disappears. To enable icons and emojis, I followed the Material for MkDocs documentation on icons and emojis and added the following to mkdocs.yml : markdown_extensions: - attr_list - pymdownx.emoji: emoji_index: !!python/name:materialx.emoji.twemoji emoji_generator: !!python/name:materialx.emoji.to_svg Now I can make faces :smile: (Hmm, apparently this isn't working anymore.) To enable keyboard keys like Ctrl + Alt + Del , I added the following to mkdocs.yml : markdown_extensions : - pymdownx.keys Now I can add keyboard keys into the text by enclosing the expression with ++, and using one + between keys. The buttons above are made by typing ++ctrl+alt+del++ . Here is the key reference. There is no way to make image captions in Markdown. This seems like a glaring omission. I used this method of putting the caption in the next line after the image tag and enclosing the caption with **. 
Like this: ![ Git discussion ]( images/hategit.PNG ) *Some sentiments about Git* The image and caption are displayed like this: Sentiments about Git The caption is inline with the image, which is not great, but the workflow is simple, so I'm keeping it. If the caption doesn't work, put it inline with the image tag. View page source I've stopped using the page source button above, which links to a text file that I need to update manually. I've instead added an icon next to the page title at the top. I got the icon from Iconify . The icon links to the page source in the Github repository.","title":"Page source"},{"location":"assignments/week01.html#mathjax","text":"I installed MathJax by following the steps in the Material for MkDocs documentation . Now I can display beautiful equations on my website, using LaTex syntax.","title":"MathJax"},{"location":"assignments/week01.html#git-setup","text":"I cloned my Fab Academy repository on Gitlab to my computer with $ git clone https://gitlab.fabcloud.org/academany/fabacademy/2023/labs/isafjordur/students/svavar-konradsson.git Cloning into 'svavar-konradsson'... remote: Enumerating objects: 15, done. remote: Counting objects: 100% (15/15), done. remote: Compressing objects: 100% (14/14), done. remote: Total 15 (delta 1), reused 0 (delta 0), pack-reused 0 Receiving objects: 100% (15/15), 28.91 KiB | 7.23 MiB/s, done. Resolving deltas: 100% (1/1), done. Then I edited index.html a little bit and tried pushing the change to the online repo on Gitlab: git push warning: missing OAuth configuration for gitlab.fabcloud.org - see https://aka.ms/gcm/gitlab for more information remote: HTTP Basic: Access denied. The provided password or token is incorrect or your account has 2FA enabled and you must use a personal access token instead of a password. 
See https://gitlab.fabcloud.org/help/topics/git/troubleshooting_git#error-on-git-fetch-http-basic-access-denied fatal: Authentication failed for 'https://gitlab.fabcloud.org/academany/fabacademy/2023/labs/isafjordur/students/svavar-konradsson.git/' I looked up how to get an access token and my instructor \u00de\u00f3rarinn asked \u00c1rni at Fab Lab Akureyri if tokens are the thing to use. \u00c1rni recommended using an SSH key instead. I managed to generate an ssh key using this tutorial : $ ssh-keygen -t ed25519 -C \"generate an ssh key for gitlab to clone my repository\" Generating public/private ed25519 key pair. Enter file in which to save the key (/n//.ssh/id_ed25519): Enter passphrase (empty for no passphrase): Then I wasn't able to type anything as a passphrase. Then I found that I was able to type, but the Git Bash terminal just didn't show anything. I wrote a phrase twice and hit Enter. I got an SSH key. OK, I have an SSH key, but what do I do with it? And why? Why is this so complicated? I just want to upload some files to the internet. Then I found this tutorial on adding an SSH key to my Gitlab account and followed it blindly. I used $ cat ~/.ssh/id_ed25519.pub | clip to copy the contents of the SSH key file. Put it into the Gitlab account under profile -> SSH keys -> Add an SSH key. Then went into C:/code and said `git clone \"the thing I copied when I pressed clone in gitlab\"' Then I got the message: *** Please tell me who you are. Run git config --global user.email \"you@example.com\" git config --global user.name \"Your Name\" to set your account's default identity. I set the identity: PS C:\\code\\svavar-konradsson> git config --global user.email \"my@email.com\" PS C:\\code\\svavar-konradsson> git config --global user.name \"Svavar Konradsson\" and then said git clone That worked! I opened index.html, put my name into the title and saved. That appeared under Source control in Gitlab, I wrote a comment in a field and clicked Commit. 
Then a Sync button appeared and I pressed that and it pushed the site onto the online repo. Now I need to type my passphrase twice every time that I push files to the online repo. That's annoying, so I'm going to generate a new SSH key and skip the passphrase. I followed \u00c1rni Bj\u00f6rnsson documentation to generate an RSA key and put it into my Gitlab profile. Every time I made major changes in Windows Explorer; deleted lots of files and moved others, I needed to generate a new SSH key. The last one was ssh-keygen -t rsa -b 2048 cat ~/.ssh/id_rsa.pub | clip The MkDocs convention is to put the built web page into a folder called site, but Gitlab needs the web site to be in a folder called public. Finally I found the site_dir setting , which I can change in mkdocs.yml so that I can rename the site folder to public . At first I manually renamed site to public and pushed the files to the Gitlab repo. I got into trouble when the dot in front of the file .gitlab-ci.yml was erased somehow and the site wasn't deployed. My instructor \u00de\u00f3rarinn found the problem and after that the site worked. .md-content__button { display: none; }","title":"Git setup"},{"location":"assignments/week02.html","text":"Computer-Aided Design Raster image editing My instructor \u00de\u00f3rarinn gave me a quick demo of how to isolate an object in an image and make the background transparent. That will probably come in handy in many situations. We'll use Photopea , a free online (and downloadable) photo editor that closely resembles Photoshop. You can even go through Photoshop tutorials in Photopea. Let's start by taking a picture of a roll of tape with flat lighting, and we'll try to avoid having shadows. I'll use the magic wand to select pixels with the a similar color as the pixel under the mouse. Then I'll invert the selection and finally I'll make a mask. Next I will use the brush tool to remove the rest of the background. 
I can hold Alt and the right mouse button to adjust the size and softness of the brush. Now I brushed away the rest of the background. But sometimes I accidentally brush away part of the object. Tip from \u00de\u00f3rarinn: If you brush away too much, you can press X and switch to the other color in the mask. Then you can brush the object back into the picture. Finally, it can be good to add a color fill layer. It can make it easier to see spots that you missed with the brush. You need to drag the color layer below the image layer in the menu on the right. Then the color is behind the image. Missed a bit! Raster image compression To compress the images for the web I installed ImageMagick for Windows from the website. The installation included FFmpeg. I couldn't find any info on the website on how to get started (for someone who doesn't use command line tools), but then I came across Aaron Logan's Fab Academy site from 2022. He recommended following along with this video and that did the trick for me. It was useful to see the trick of writing cmd in the address bar of the folder that includes the images, and then the Windows Command Prompt opens in that directory. Failure I entered magick convert 2023-02-02 15_22_45-Photopea _ Online Photo Editor.png -resize 40% -quality 80 output2.jpg convert: unable to open image '2023-02-02': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image '15_22_45-Photopea': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image '15_22_45-Photopea': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image '_': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image '_': No such file or directory @ error/blob.c/OpenBlob/3569. 
convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image 'Online': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image 'Online': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image 'Photo': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image 'Photo': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image 'Editor.png': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image 'Editor.png': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no images defined `output2.jpg' @ error/convert.c/ConvertImageCommand/3342. I guess ImageMagick didn't like the file name that the Greenshot screen grabbing tool generates. My instructor \u00de\u00f3rarinn recommended Greenshot to me, it's lightweight and because it freezes the screen, you can grab screenshots where your mouse shows up. I tried renaming one of the files to 1.png and then the ImageMagick compression command worked: C:\\code\\FabAcademyImagesUncompressed\\week02>magick convert 1.png -resize 40% -quality 80 output1.jpg But I wanted to convert a whole folder of images with one command. This batch resize tutorial showed my how to set the width of the resized image while keeping the original aspect ratio. Now I put all the uncompressed images for the week in one folder and run magick mogrify -resize 560 *.png to resize all the images in the folder to be 560 pixels wide. I didn't even have to rename them this time. Then I run magick mogrify -format jpg *.png to convert all the images in the folder to JPEG format. Be aware that mogrify doesn't copy the images but changes the originals. 
So I just make copies of them first and put them all in one folder before running the mogrify . Here's the mogrify documentation . To resize and compress single images as I go along, I use magick convert image.png -resize 560 image.jpg Here is the finished product: 3D modeling in Blender Since I can generally get Fusion 360 to do what I want it to, I decided to learn a little bit about Blender. I've been missing a way to edit STL meshes and model organic stuff, so here we go. Right after you click Download, this Youtube tutorial series appears on the Blender website. So I got started and acquainted myself with the viewport navigation. Remember that you can press F to go to full screen on Youtube. Rotate by pressing and holding the XYZ axes in the image. You rotate by holding down the scroll button on the mouse, just like in SolidWorks. So that's convenient. There's also a set of XYZ axes in the top right corner that you can grab and rotate like the Autodesk ViewCube. You can also click top, side and front views. Nice. Clicking and holding the little hand button next to the axes moves the center of rotation. And you zoom by scrolling the wheel. The little camera symbol next to the axes lets you look through the camera that you've put into the scene (you can also press 0 on the number pad). Someone in the Youtube comments mentioned that F12 renders the camera view. So 0 and then F12 is a quick way to render the model. Next to the camera icon is a grid symbol, which switches between orthographic mode (good for modeling) and perspective mode (good for rendering realistic images). It's good to keep an eye on the status bar on the bottom, which shows you the context you're working in at the moment. Ctrl + Space is a convenient way to maximize any viewport or panel in the interface, and then you can press Ctrl + Space again to return to the default setup: Default interface (left) and Ctrl + Space to maximize side panel (right). 
There are several tabs at the top that represent different workspaces. In each workspace, the panels in the Blender interface have been set up for that particular task: All the viewports and panels can be resized. To choose a different panel, click the symbol with the down arrow in its top left corner: In the Layout workspace, pressing T will open commonly used tools like Move and Scale, and pressing ++N++ will open info about the selected object, tool-specific options and view options. Left menu is toggled with T , right menu is toggled with N . Notice the little tabs on the right side of the right menu called Item, Tool and View. In the View tab, you can edit the position of the 3D cursor, which acts as the spawning point for new objects and you can do things like pivot objects around it. In the Layout workspace, the Timeline below the viewport controls the playback of the animation. You can scroll to zoom in and out and hold the scroll wheel to pan. You can define start and end points and add key frames. Warning DON'T MAKE TUTORIALS In our weekly local Fab Academy meeting in Iceland, I mentioned how long the documentation was taking and that I wasn't getting anywhere with Blender. My instructor \u00de\u00f3rarinn said that I shouldn't be making a tutorial on how to use Blender but rather summarize what I did, what was interesting and then link to the tutorial that I used. That's a relief. The rest of this page will be more concise and interesting. I ended up quitting the official Blender tutorial after the third video and starting the doughnut tutorial by Blender Guru on Youtube. I remember things much better when I have a goal in mind and just use the tools I need to achieve it. I followed videos 1 through 6. I haven't modeled anything organic before, so it was interesting to try make the doughnut lopsided and kind of realistic. It goes against my engineering training, where you usually want things to be symmetrical. 
I liked getting to know subdivision modeling, to make smooth object using relatively few polygons. And I really liked that my 3D mouse works in Blender. The few times when I've attempted to edit a mesh, I've moved single vertices, resulting in gnarly, pointy looking objects. So I always gave up! But now that I know about proportional editing in Blender, I can select a single vertex and the vertices close to it will follow, while those further away will be affected less. This is much quicker and more effective. I love it! And thinking about my final project, the robot arm, it would be lovely to make a keypoint editor to move it, and use proportional editing to move the keypoints around the moved point, to get a smooth rolloff. I also want to remember the Alt + S shortcut to make the faces move directly outward (normal to the faces). I still have some way to go before I'll be confident in my mesh editing skills. Here's my first attempt to make the icing on the doughnut droop: Side view looks good, top view reveals what's actually going on. After some backtracking, the icing started to look good by direct editing of the vertices. Next I tried a bit of sculpting. That means using brushes to edit the mesh. I was able to make drops in the icing by sculpting it. Rendering in Blender I tried adjusting the camera view and the light to get a good result. I tried bot the Eevee real-time rendering engine and the more realistic, but much slower, Cycles ray-tracing rendering engine. The rendering was quite slow until I enabled the CUDA graphics card in the laptop. Final result The doughnut Blender file is 6.8 MB, so I won't include it here. Make your own doughnut instead, it takes about 2 hours and then you'll be able to model cool stuff in Blender. Blender simulation I also went through a really quick tutorial on cloth simulation. Here is the result: It's fascinating! There are doors opening for me left and right! Blender is a pretty incredible piece of sofware. 
The Blender file is 20.2 MB, so I won't include it here. You can make your own in ten minutes plus maybe 30 minutes of render time. 2D design in Inkscape I've started using Inkscape a bit since I started working at Fab Lab \u00cdsafj\u00f6r\u00f0ur. But I haven't used the Clone tool before. Here's a little experiment with cloning a rectangle: I didn't save the Inkscape file, it wasn't very interesting. 2D gear design in Fusion 360 \\[ c_{pitch} = width_{tooth} \\] Antimony modeler I set up a Raspberry Pi in the case that I need to use Linux during the course. I used this tutorial to enable cloud connection. That way I can connect to the Raspberry Pi from outside its network. When I connected remotely for the first time, the screen resolution was tiny because the Raspberry Pi wasn't connected to a monitor. I followed this tutorial to hardcode the screen resolution into the boot config text file. I tried to build Matt Keeter's Antimony modeler on the Raspberry Pi, but without luck. Failure sudo apt install git build-essential libpng-dev python3-dev libboost-all-dev libgl1-mesa-dev lemon flex qt5-default ninja-build cmake Reading package lists... Done Building dependency tree... Done Reading state information... Done Package qt5-default is not available, but is referred to by another package. This may mean that the package is missing, has been obsoleted, or is only available from another source E: Package 'qt5-default' has no installation candidate I tried to install qt5 instead of qt5-default, but it didn't work. Failure sudo apt install git build-essential qt5 Reading package lists... Done Building dependency tree... Done Reading state information... Done E: Unable to locate package qt5 I don't know where to go from there. Kokopelli modeler I also tried to build Matt Keeter's Kokopelli from source on the Raspberry Pi, but also without luck: Failure ``` sudo apt-get install python python-dev python-pip gcc g++ libpng12-dev make bash cmake Reading package lists... 
Done Building dependency tree... Done Reading state information... Done Note, selecting 'python-is-python2' instead of 'python' Note, selecting 'python-dev-is-python2' instead of 'python-dev' Package libpng12-dev is not available, but is referred to by another package. his may mean that the package is missing, has been obsoleted, or is only available from another source Package python-pip is not available, but is referred to by another package. This may mean that the package is missing, has been obsoleted, or is only available from another source However the following packages replace it: python3-pip E: Package 'python-pip' has no installation candidate E: Package 'libpng12-dev' has no installation candidate ``` Voxel modeling I tried the MagicaVoxel program without looking at a tutorial, and made this figure: I'm not sure how to make anything useful with this modeling method. Maybe I'll think of something later. Here's the voxel guy I made: Download voxel guy Solvespace parametric 3D CAD I went through a Solvespace tutorial and quite liked the experience. I like the dark 8-bit look of it. I was also surprised to find that my 3D mouse works in Solvespace. The program is lightweight and modeling seems quick, once you've memorized a few keyboard shortcuts and familiarized yourself with how the sketch constraints work. In the time that it took Fusion 360 to open, I saved the bracket and exported it as STEP, STL and a triangle mesh with a Three.js html viewer. You can open the 3D model in the browser below! Open bracket 3D model in browser Solvespace was written by Jonathan Westhues and he's made other impressive things too. This tube joint pattern generator would have been a lifesaver when we were building the steel tube spaceframe for the first and second Team Spark electric racing cars back in 2011 and 2012. Solvespace was maintained for a few years by M-Labs. M-Labs wrote the Three.js export feature, among others. 
Jonathan says himself that Solvespace's NURBS operations are not as good as OpenCASCADE's but they're much smaller. The constraint solver is a remarkable achievement though, and it works well. Jonathan originally made a 2D CAD program which was superseded by Solvespace, but the paper he wrote on the sketch solver is an interesting read. The Solvespace solver library is the solver behind CAD Sketcher in Blender. The Solvespace feature list mentions G-code export with cutter radius compensation and path traced with mechanism, exportable into a spreadsheet. These two are interesting. The next thing I want to try in Solvespace is to make a movable assembly. For me the killer feature in Solvespace is the Three.js export. The in-browser Three.js model even has the Solvespace look! The file includes the whole Three.js library, which means that the file for this simple bracket is 0.7 MB. So if you want to display more than one model they will take up a lot of space. In that case you may want to export only the js model geometries from Solvespace and then manually link them to the three.js file. The bracket model geometry is only 52 KB. Here's the Solvespace model for download: Download Solvespace bracket Rhino3D Rhino can do pretty much everything. A full license costs a thousand euros and you keep that version of Rhino for life. Even better, students and educators can get a license for 200 euros. But first, I'm going to get the trial version, which lasts for 90 days (all of the Fab Academy). I've wanted to try Rhino for quite some time, but all those unfamiliar tools and menus are a bit intimidating. I know solid extrude must be there, but where? I didn't like the official Getting Started tutorials very much, because the first videos just show you things but not making you do anything in the software. So I went to Youtube and found a 40 minute introduction to Rhino for architecture students. I followed along for 17 minutes. 
Selecting a surface with Ctrl + Shift left mouse in the first image and dragging the black dot on the red axis. That leads to the extrusion in the second image. I learned to make lines, curves, surfaces, solids and how to manipulate them. Now that I've turned on the Gumball tool, I can just select a surface and extrude it by pulling on the dot on one of the Gumball tool axes. Nice! In the above picture I'm rotating a surface, which changes the whole shape. Rhino seems to have many more tools than Fusion 360, so it's interesting for making complicated shapes. I especially like the ability to grab any point, line or surface and move and rotate them with the Gumball tool. That's a really quick way to make interesting shapes that I would find difficult to model in Fusion 360. But I still haven't learned how to model precise things in Rhino with dimensions and constraints. Here's the Rhino file I made (it's just nonsense like you see in the images): Download Rhino model Rhino + Grasshopper I went through a quick tutorial and made a nice Voronoi pattern! I really like the possibilities in Grasshopper. I've wanted to try it for some time. And I like the simplicity of the Voronoi method, you just make a line midway between every point and its next neighbor, and then trim the lines. A random distribution of points results in a cellular-like pattern. Here's the Rhino file with the extruded Voronoi surface: Download Rhino + Grasshopper Voronoi model FreeCAD After watching this video , I got excited about learning FreeCAD, but I'm 20 minutes into a tutorial and I've only learned how to enable dark mode, and I've discovered that my 3D mouse doesn't work well with the program. Or maybe I haven't found the right settings. Update: Now I've started modeling and I'm starting to like FreeCAD. It's incredibly capable. I only stuck with it because Neil Gershenfeld is so adamant that it's a good tool and that his Fab Academy students should try it. 
The feature tree is very similar to Inventor and SolidWorks. You can go back in time and change features that control features later in the tree. I could get used to this. Hold on! There's an HTML export option! And it uses native WebGL, without the Three.js library on top of it. And the bracket model is a tiny 60 KB, including the viewer! Look! Open FreeCAD bracket in browser The HTML file is human-readable, so I could easily go into it and change the background gradient colors. The bracket itself still has a yellowish color on the bottom, but this is good enough for now. Open FreeCAD bracket with greytone background Here's the FreeCAD bracket model: Download FreeCAD bracket model Update: I also tried setting up a parametric model based on a spreadsheet. I followed this tutorial . Here's the model that includes a few configurations: Download configurable FreeCAD cube I would also like to try Python scripting in FreeCAD when I have the time. On free and open source engineering software I must admit that I've been prejudiced against free and open source versions of the engineering software that I've been using. If it's free there must be something wrong with it. I've assumed that it must be missing lots of features and that it must have a bad user interface and be riddled with bugs. Not so! And there are features in the free software that are not found in paid software at any price. Autodesk and Dassault Syst\u00e8mes, the makers of the CAD software I use most, have thousands of employees. FreeCAD is developed by like three people and it can do finite element analysis! How is this possible? Because of the ecosystem of open-source packages that get integrated into FreeCAD, like the Open SCAD workbench, for example. And the open nature of the software ensures that those who are interested can learn everything about it, down to the lowest level, and improve it and make their own versions of it. 
This is similar to the original purpose of published patents, to disseminate technology for everyone. It's interesting to note that an old open source project that was developed in the late 1960s is still the state of the art in structural Finite Element Analysis . Nastran is a million lines of FORTRAN code written for NASA by the company that would become MSC. Nastran is the core in the most advanced FEA solvers in Autodesk Inventor and even in more exotic and cutting-edge software like ADINA , which is used for the most difficult multiphysics problems like simulating nuclear reactors before they are built. I came across ADINA in my quest to understand shock absorbers . They are surprisingly complex. ADINA makes an FEA solver that is more advanced than the Siemens NX solver, and that is saying something. NX is arguably the most advanced CAD software in the world. Its geometry kernel is also the basis of the Onshape, Shapr3D and Altair CAD software. CADtron I didn't try Kevin Lynagh's CADtron because it hasn't been released yet, but it's an interesting gesture-based 2D CAD program based on the Solvespace library. Here's an example of a perpendicular constraint drawn with a stylus (from Kevin's video): Fusion 360 Final project mockup I made a mockup of my final project in Fusion 360. I connected the parts together with joints, so that I could move the arm realistically. This arm has six axes. I imported a few electronic components from GrabCAD and put them on the arm to show how I envision the design. I want to make the arm out of printed circuit boards. They are fiber reinforced, so they are quite stiff. If I place some rigid foam between two circuit boards I have a very stiff and light part that also contains the microcontroller and motor driver to control the joint. I haven't seen a robot arm made of PCBs anywhere, so that's what I want to make. 
My instructor \u00de\u00f3rarinn suggested that I think about designing the arm in such a way that it could also be laser cut or 3D printed, depending on the fabrication tools that people have access to. So here are three versions, rendered in the Fusion 360 rendering environment: Tan colored FR1 circuit board arm Red 3D printed arm Transparent acrylic laser cut arm Motion study I had trouble figuring out the Fusion 360 Animation environment, so I did a motion study instead. Before making the motion study, I needed to create rotational joints between the parts using the Joint operation (see in the menu above). OK, let's go back and define all the joints first, and make the base fixed to the ground, so that it doesn't rotate with the other parts. It's quite simple to select the hole on each part to rotate around and that's it, really. You may need to play around with some of the settings until you get what you want. But there aren't that many settings. All the joints that I defined automatically appear in the motion study as a colored line. I can place keypoints on those lines at certain times to create motion in those joints. Then it's just a matter of figuring out which joint is which and its extents of motion and playing around with the interface until the movement looks good. Note To capture short animated GIFs of what I'm doing in software, I use LICEcap . It's available for Windows and MacOS. It's lightweight and produces much smaller GIFs than any other screen capture program that I've used. Because of this recommendation , I used the command line tool Gifsicle to resize the GIF after recording it with LICECap . I tried double-clicking gifsicle.exe, but nothing happened. I found this Reddit discussion , where it was explained that Gifsicle is a command-line tool, so you just enter the commands. But where? I tried the same method as with ImageMagick. 
I put the GIF into the folder with Gifsicle and typed cmd Enter , which opened the Command Prompt in that folder. Then I ended up using the command gifsicle motion_study5.gif --resize 660x220 --colors 256 --optimize > motion_study5_resized.gif and that worked! The GIF went from 844 KB to 200 KB. I quite like the control interface for the motion study. Each line controls a joint. You click the line to make a keypoint and enter the position of that joint. This is more intuitive than the robot control software that I've tried. It would be nice to control the arm in this way. Someone on the internet said that Fusion 360 is not the right tool to control robot arms, and they're probably right. They recommended Blender. I've been thinking about writing a Python script for Blender that uses the animation environment to control my robot arm. Or I could try a browser-based solution . I saw something like this when searching the Fab Academy archives. Here's the robot arm Fusion 360 file, including the conveyor belt and motion study: Download robot arm mockup .md-content__button { display: none; }","title":"2. Computer-Aided Design"},{"location":"assignments/week02.html#computer-aided-design","text":"","title":"Computer-Aided Design   "},{"location":"assignments/week02.html#raster-image-editing","text":"My instructor \u00de\u00f3rarinn gave me a quick demo of how to isolate an object in an image and make the background transparent. That will probably come in handy in many situations. We'll use Photopea , a free online (and downloadable) photo editor that closely resembles Photoshop. You can even go through Photoshop tutorials in Photopea. Let's start by taking a picture of a roll of tape with flat lighting, and we'll try to avoid having shadows. I'll use the magic wand to select pixels with a similar color as the pixel under the mouse. Then I'll invert the selection and finally I'll make a mask. Next I will use the brush tool to remove the rest of the background. 
I can hold Alt and the right mouse button to adjust the size and softness of the brush. Now I brushed away the rest of the background. But sometimes I accidentally brush away part of the object. Tip from \u00de\u00f3rarinn: If you brush away too much, you can press X and switch to the other color in the mask. Then you can brush the object back into the picture. Finally, it can be good to add a color fill layer. It can make it easier to see spots that you missed with the brush. You need to drag the color layer below the image layer in the menu on the right. Then the color is behind the image. Missed a bit! Raster image compression To compress the images for the web I installed ImageMagick for Windows from the website. The installation included FFmpeg. I couldn't find any info on the website on how to get started (for someone who doesn't use command line tools), but then I came across Aaron Logan's Fab Academy site from 2022. He recommended following along with this video and that did the trick for me. It was useful to see the trick of writing cmd in the address bar of the folder that includes the images, and then the Windows Command Prompt opens in that directory. Failure I entered magick convert 2023-02-02 15_22_45-Photopea _ Online Photo Editor.png -resize 40% -quality 80 output2.jpg convert: unable to open image '2023-02-02': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image '15_22_45-Photopea': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image '15_22_45-Photopea': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image '_': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image '_': No such file or directory @ error/blob.c/OpenBlob/3569. 
convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image 'Online': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image 'Online': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image 'Photo': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image 'Photo': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no decode delegate for this image format `' @ error/constitute.c/ReadImage/741. convert: unable to open image 'Editor.png': No such file or directory @ error/blob.c/OpenBlob/3569. convert: unable to open image 'Editor.png': No such file or directory @ error/blob.c/OpenBlob/3569. convert: no images defined `output2.jpg' @ error/convert.c/ConvertImageCommand/3342. I guess ImageMagick didn't like the file name that the Greenshot screen grabbing tool generates. My instructor \u00de\u00f3rarinn recommended Greenshot to me, it's lightweight and because it freezes the screen, you can grab screenshots where your mouse shows up. I tried renaming one of the files to 1.png and then the ImageMagick compression command worked: C:\\code\\FabAcademyImagesUncompressed\\week02>magick convert 1.png -resize 40% -quality 80 output1.jpg But I wanted to convert a whole folder of images with one command. This batch resize tutorial showed me how to set the width of the resized image while keeping the original aspect ratio. Now I put all the uncompressed images for the week in one folder and run magick mogrify -resize 560 *.png to resize all the images in the folder to be 560 pixels wide. I didn't even have to rename them this time. Then I run magick mogrify -format jpg *.png to convert all the images in the folder to JPEG format. Be aware that mogrify doesn't copy the images but changes the originals. 
So I just make copies of them first and put them all in one folder before running the mogrify . Here's the mogrify documentation . To resize and compress single images as I go along, I use magick convert image.png -resize 560 image.jpg Here is the finished product:","title":"Raster image editing"},{"location":"assignments/week02.html#3d-modeling-in-blender","text":"Since I can generally get Fusion 360 to do what I want it to, I decided to learn a little bit about Blender. I've been missing a way to edit STL meshes and model organic stuff, so here we go. Right after you click Download, this Youtube tutorial series appears on the Blender website. So I got started and acquainted myself with the viewport navigation. Remember that you can press F to go to full screen on Youtube. Rotate by pressing and holding the XYZ axes in the image. You rotate by holding down the scroll button on the mouse, just like in SolidWorks. So that's convenient. There's also a set of XYZ axes in the top right corner that you can grab and rotate like the Autodesk ViewCube. You can also click top, side and front views. Nice. Clicking and holding the little hand button next to the axes moves the center of rotation. And you zoom by scrolling the wheel. The little camera symbol next to the axes lets you look through the camera that you've put into the scene (you can also press 0 on the number pad). Someone in the Youtube comments mentioned that F12 renders the camera view. So 0 and then F12 is a quick way to render the model. Next to the camera icon is a grid symbol, which switches between orthographic mode (good for modeling) and perspective mode (good for rendering realistic images). It's good to keep an eye on the status bar on the bottom, which shows you the context you're working in at the moment. 
Ctrl + Space is a convenient way to maximize any viewport or panel in the interface, and then you can press Ctrl + Space again to return to the default setup: Default interface (left) and Ctrl + Space to maximize side panel (right). There are several tabs at the top that represent different workspaces. In each workspace, the panels in the Blender interface have been set up for that particular task: All the viewports and panels can be resized. To choose a different panel, click the symbol with the down arrow in its top left corner: In the Layout workspace, pressing T will open commonly used tools like Move and Scale, and pressing ++N++ will open info about the selected object, tool-specific options and view options. Left menu is toggled with T , right menu is toggled with N . Notice the little tabs on the right side of the right menu called Item, Tool and View. In the View tab, you can edit the position of the 3D cursor, which acts as the spawning point for new objects and you can do things like pivot objects around it. In the Layout workspace, the Timeline below the viewport controls the playback of the animation. You can scroll to zoom in and out and hold the scroll wheel to pan. You can define start and end points and add key frames. Warning DON'T MAKE TUTORIALS In our weekly local Fab Academy meeting in Iceland, I mentioned how long the documentation was taking and that I wasn't getting anywhere with Blender. My instructor \u00de\u00f3rarinn said that I shouldn't be making a tutorial on how to use Blender but rather summarize what I did, what was interesting and then link to the tutorial that I used. That's a relief. The rest of this page will be more concise and interesting. I ended up quitting the official Blender tutorial after the third video and starting the doughnut tutorial by Blender Guru on Youtube. I remember things much better when I have a goal in mind and just use the tools I need to achieve it. I followed videos 1 through 6. 
I haven't modeled anything organic before, so it was interesting to try to make the doughnut lopsided and kind of realistic. It goes against my engineering training, where you usually want things to be symmetrical. I liked getting to know subdivision modeling, to make smooth objects using relatively few polygons. And I really liked that my 3D mouse works in Blender. The few times when I've attempted to edit a mesh, I've moved single vertices, resulting in gnarly, pointy looking objects. So I always gave up! But now that I know about proportional editing in Blender, I can select a single vertex and the vertices close to it will follow, while those further away will be affected less. This is much quicker and more effective. I love it! And thinking about my final project, the robot arm, it would be lovely to make a keypoint editor to move it, and use proportional editing to move the keypoints around the moved point, to get a smooth rolloff. I also want to remember the Alt + S shortcut to make the faces move directly outward (normal to the faces). I still have some way to go before I'll be confident in my mesh editing skills. Here's my first attempt to make the icing on the doughnut droop: Side view looks good, top view reveals what's actually going on. After some backtracking, the icing started to look good by direct editing of the vertices. Next I tried a bit of sculpting. That means using brushes to edit the mesh. I was able to make drops in the icing by sculpting it.","title":"3D modeling in Blender"},{"location":"assignments/week02.html#rendering-in-blender","text":"I tried adjusting the camera view and the light to get a good result. I tried both the Eevee real-time rendering engine and the more realistic, but much slower, Cycles ray-tracing rendering engine. The rendering was quite slow until I enabled the CUDA graphics card in the laptop. Final result The doughnut Blender file is 6.8 MB, so I won't include it here. 
Make your own doughnut instead, it takes about 2 hours and then you'll be able to model cool stuff in Blender.","title":"Rendering in Blender"},{"location":"assignments/week02.html#blender-simulation","text":"I also went through a really quick tutorial on cloth simulation. Here is the result: It's fascinating! There are doors opening for me left and right! Blender is a pretty incredible piece of software. The Blender file is 20.2 MB, so I won't include it here. You can make your own in ten minutes plus maybe 30 minutes of render time.","title":"Blender simulation"},{"location":"assignments/week02.html#2d-design-in-inkscape","text":"I've started using Inkscape a bit since I started working at Fab Lab \u00cdsafj\u00f6r\u00f0ur. But I haven't used the Clone tool before. Here's a little experiment with cloning a rectangle: I didn't save the Inkscape file, it wasn't very interesting.","title":"2D design in Inkscape"},{"location":"assignments/week02.html#2d-gear-design-in-fusion-360","text":"\\[ c_{pitch} = width_{tooth} \\]","title":"2D gear design in Fusion 360"},{"location":"assignments/week02.html#antimony-modeler","text":"I set up a Raspberry Pi in the case that I need to use Linux during the course. I used this tutorial to enable cloud connection. That way I can connect to the Raspberry Pi from outside its network. When I connected remotely for the first time, the screen resolution was tiny because the Raspberry Pi wasn't connected to a monitor. I followed this tutorial to hardcode the screen resolution into the boot config text file. I tried to build Matt Keeter's Antimony modeler on the Raspberry Pi, but without luck. Failure sudo apt install git build-essential libpng-dev python3-dev libboost-all-dev libgl1-mesa-dev lemon flex qt5-default ninja-build cmake Reading package lists... Done Building dependency tree... Done Reading state information... Done Package qt5-default is not available, but is referred to by another package. 
This may mean that the package is missing, has been obsoleted, or is only available from another source E: Package 'qt5-default' has no installation candidate I tried to install qt5 instead of qt5-default, but it didn't work. Failure sudo apt install git build-essential qt5 Reading package lists... Done Building dependency tree... Done Reading state information... Done E: Unable to locate package qt5 I don't know where to go from there.","title":"Antimony modeler"},{"location":"assignments/week02.html#kokopelli-modeler","text":"I also tried to build Matt Keeter's Kokopelli from source on the Raspberry Pi, but also without luck: Failure ``` sudo apt-get install python python-dev python-pip gcc g++ libpng12-dev make bash cmake Reading package lists... Done Building dependency tree... Done Reading state information... Done Note, selecting 'python-is-python2' instead of 'python' Note, selecting 'python-dev-is-python2' instead of 'python-dev' Package libpng12-dev is not available, but is referred to by another package. his may mean that the package is missing, has been obsoleted, or is only available from another source Package python-pip is not available, but is referred to by another package. This may mean that the package is missing, has been obsoleted, or is only available from another source However the following packages replace it: python3-pip E: Package 'python-pip' has no installation candidate E: Package 'libpng12-dev' has no installation candidate ```","title":"Kokopelli modeler"},{"location":"assignments/week02.html#voxel-modeling","text":"I tried the MagicaVoxel program without looking at a tutorial, and made this figure: I'm not sure how to make anything useful with this modeling method. Maybe I'll think of something later. Here's the voxel guy I made: Download voxel guy","title":"Voxel modeling"},{"location":"assignments/week02.html#solvespace-parametric-3d-cad","text":"I went through a Solvespace tutorial and quite liked the experience. 
I like the dark 8-bit look of it. I was also surprised to find that my 3D mouse works in Solvespace. The program is lightweight and modeling seems quick, once you've memorized a few keyboard shortcuts and familiarized yourself with how the sketch constraints work. In the time that it took Fusion 360 to open, I saved the bracket and exported it as STEP, STL and a triangle mesh with a Three.js html viewer. You can open the 3D model in the browser below! Open bracket 3D model in browser Solvespace was written by Jonathan Westhues and he's made other impressive things too. This tube joint pattern generator would have been a lifesaver when we were building the steel tube spaceframe for the first and second Team Spark electric racing cars back in 2011 and 2012. Solvespace was maintained for a few years by M-Labs. M-Labs wrote the Three.js export feature, among others. Jonathan says himself that Solvespace's NURBS operations are not as good as OpenCASCADE's but they're much smaller. The constraint solver is a remarkable achievement though, and it works well. Jonathan originally made a 2D CAD program which was superseded by Solvespace, but the paper he wrote on the sketch solver is an interesting read. The Solvespace solver library is the solver behind CAD Sketcher in Blender. The Solvespace feature list mentions G-code export with cutter radius compensation and path traced with mechanism, exportable into a spreadsheet. These two are interesting. The next thing I want to try in Solvespace is to make a movable assembly. For me the killer feature in Solvespace is the Three.js export. The in-browser Three.js model even has the Solvespace look! The file includes the whole Three.js library, which means that the file for this simple bracket is 0.7 MB. So if you want to display more than one model they will take up a lot of space. In that case you may want to export only the js model geometries from Solvespace and then manually link them to the three.js file. 
The bracket model geometry is only 52 KB. Here's the Solvespace model for download: Download Solvespace bracket","title":"Solvespace parametric 3D CAD"},{"location":"assignments/week02.html#rhino3d","text":"Rhino can do pretty much everything. A full license costs a thousand euros and you keep that version of Rhino for life. Even better, students and educators can get a license for 200 euros. But first, I'm going to get the trial version, which lasts for 90 days (all of the Fab Academy). I've wanted to try Rhino for quite some time, but all those unfamiliar tools and menus are a bit intimidating. I know solid extrude must be there, but where? I didn't like the official Getting Started tutorials very much, because the first videos just show you things but not making you do anything in the software. So I went to Youtube and found a 40 minute introduction to Rhino for architecture students. I followed along for 17 minutes. Selecting a surface with Ctrl + Shift left mouse in the first image and dragging the black dot on the red axis. That leads to the extrusion in the second image. I learned to make lines, curves, surfaces, solids and how to manipulate them. Now that I've turned on the Gumball tool, I can just select a surface and extrude it by pulling on the dot on one of the Gumball tool axes. Nice! In the above picture I'm rotating a surface, which changes the whole shape. Rhino seems to have many more tools than Fusion 360, so it's interesting for making complicated shapes. I especially like the ability to grab any point, line or surface and move and rotate them with the Gumball tool. That's a really quick way to make interesting shapes that I would find difficult to model in Fusion 360. But I still haven't learned how to model precise things in Rhino with dimensions and constraints. 
Here's the Rhino file I made (it's just nonsense like you see in the images): Download Rhino model","title":"Rhino3D"},{"location":"assignments/week02.html#rhino-grasshopper","text":"I went through a quick tutorial and made a nice Voronoi pattern! I really like the possibilities in Grasshopper. I've wanted to try it for some time. And I like the simplicity of the Voronoi method, you just make a line midway between every point and its next neighbor, and then trim the lines. A random distribution of points results in a cellular-like pattern. Here's the Rhino file with the extruded Voronoi surface: Download Rhino + Grasshopper Voronoi model","title":"Rhino + Grasshopper"},{"location":"assignments/week02.html#freecad","text":"After watching this video , I got excited about learning FreeCAD, but I'm 20 minutes into a tutorial and I've only learned how to enable dark mode, and I've discovered that my 3D mouse doesn't work well with the program. Or maybe I haven't found the right settings. Update: Now I've started modeling and I'm starting to like FreeCAD. It's incredibly capable. I only stuck with it because Neil Gershenfeld is so adamant that it's a good tool and that his Fab Academy students should try it. The feature tree is very similar to Inventor and SolidWorks. You can go back in time and change features that control features later in the tree. I could get used to this. Hold on! There's an HTML export option! And it uses native WebGL, without the Three.js library on top of it. And the bracket model is a tiny 60 KB, including the viewer! Look! Open FreeCAD bracket in browser The HTML file is human-readable, so I could easily go into it and change the background gradient colors. The bracket itself still has a yellowish color on the bottom, but this is good enough for now. Open FreeCAD bracket with greytone background Here's the FreeCAD bracket model: Download FreeCAD bracket model Update: I also tried setting up a parametric model based on a spreadsheet. 
I followed this tutorial . Here's the model that includes a few configurations: Download configurable FreeCAD cube I would also like to try Python scripting in FreeCAD when I have the time.","title":"FreeCAD"},{"location":"assignments/week02.html#on-free-and-open-source-engineering-software","text":"I must admit that I've been prejudiced against free and open source versions of the engineering software that I've been using. If it's free there must be something wrong with it. I've assumed that it must be missing lots of features and that it must have a bad user interface and be riddled with bugs. Not so! And there are features in the free software that are not found in paid software at any price. Autodesk and Dassault Syst\u00e8mes, the makers of the CAD software I use most, have thousands of employees. FreeCAD is developed by like three people and it can do finite element analysis! How is this possible? Because of the ecosystem of open-source packages that get integrated into FreeCAD, like the Open SCAD workbench, for example. And the open nature of the software ensures that those who are interested can learn everything about it, down to the lowest level, and improve it and make their own versions of it. This is similar to the original purpose of published patents, to disseminate technology for everyone. It's interesting to note that an old open source project that was developed in the late 1960s is still the state of the art in structural Finite Element Analysis . Nastran is a million lines of FORTRAN code written for NASA by the company that would become MSC. Nastran is the core in the most advanced FEA solvers in Autodesk Inventor and even in more exotic and cutting-edge software like ADINA , which is used for the most difficult multiphysics problems like simulating nuclear reactors before they are built. I came across ADINA in my quest to understand shock absorbers . They are surprisingly complex. 
ADINA makes an FEA solver that is more advanced than the Siemens NX solver, and that is saying something. NX is arguably the most advanced CAD software in the world. Its geometry kernel is also the basis of the Onshape, Shapr3D and Altair CAD software.","title":"On free and open source engineering software"},{"location":"assignments/week02.html#cadtron","text":"I didn't try Kevin Lynagh's CADtron because it hasn't been released yet, but it's an interesting gesture-based 2D CAD program based on the Solvespace library. Here's an example of a perpendicular constraint drawn with a stylus (from Kevin's video):","title":"CADtron"},{"location":"assignments/week02.html#fusion-360","text":"","title":"Fusion 360"},{"location":"assignments/week02.html#final-project-mockup","text":"I made a mockup of my final project in Fusion 360. I connected the parts together with joints, so that I could move the arm realistically. This arm has six axes. I imported a few electronic components from GrabCAD and put them on the arm to show how I envision the design. I want to make the arm out of printed circuit boards. The are fiber reinforced, so they are quite stiff. If I place some rigid foam between two circuit boards I have a very stiff and light part that also contains the microcontroller and motor driver to control the joint. I haven't seen a robot arm made of PCBs anywhere, so that's what I want to make. My instructor \u00de\u00f3rarinn suggested that I think about designing the arm in such a way that it could also be laser cut or 3D printed, depending on the fabrication tools that people have access to. So here are three versions, rendered in the Fusion 360 rendering environment: Tan colored FR1 circuit board arm Red 3D printed arm Transparent acrylic laser cut arm","title":"Final project mockup"},{"location":"assignments/week02.html#motion-study","text":"I had trouble figuring out the Fusion 360 Animation environment, so I did a motion study instead. 
Before making the motion study, I needed to create rotational joints between the parts using the Joint operation (see in the menu above). OK, let's go back and define all the joints first, and make the base fixed to the ground, so that it doesn't rotate with the other parts. It's quite simple to select the hole on each part to rotate around and that's it, really. You may need to play around with some of the settings until you get what you want. But there aren't that many settings. All the joints that I defined automatically appear in the motion study as a colored line. I can place keypoints on those lines at certain times to create motion in those joints. Then it's just a matter of figuring out which joint is which and its extents of motion and playing around with the interface until the movement looks good. Note To capture short animated GIFs of what I'm doing in software, I use LICEcap . It's available for Windows and MacOS. It's lightweight and produces much smaller GIFs than any other screen capture program that I've used. Because of this recommendation , I used the command line tool Gifsicle to resize the GIF after recording it with LICECap . I tried double-clicking gifsicle.exe, but nothing happened. I found this Reddit discussion , where it was explained that Gifsicle is a command-line tool, so you just enter the commands. But where? I tried the same method as with ImageMagick. I put the GIF into the folder with Gifsicle and typed cmd Enter , which opened the Command Prompt in that folder. Then I ended up using the command gifsicle motion_study5.gif --resize 660x220 --colors 256 --optimize > motion_study5_resized.gif and that worked! The GIF went from 844 KB to 200 KB. I quite like the control interface for the motion study. Each line controls a joint. You click the line to make a keypoint and enter the position of that joint. This is more intuitive than the robot control software that I've tried. It would be nice to control the arm in this way. 
Someone on the internet said that Fusion 360 is not the right tool to control robot arms, and they're probably right. They recommended Blender. I've been thinking about writing a Python script for Blender that uses the animation environment to control my robot arm. Or I could try a browser-based solution . I saw something like this when searching the Fab Academy archives. Here's the robot arm Fusion 360 file, including the conveyor belt and motion study: Download robot arm mockup .md-content__button { display: none; }","title":"Motion study"},{"location":"assignments/week03.html","text":"Computer-Controlled Cutting Parametric construction kit I wanted to make a minimal parametric construction kit that was made up of only one piece. You can see the design taking shape in the hand-drawn sketches below as I was thinking about the design specifications. When I had decided on the specs, the design was fully defined. The piece can have no other shape. I wanted four pins and four pockets, so that defined the shape of the piece. The plan was to have the dimensions of the pins and pockets equal on all three axes, so that the pieces can be assembled every which way. This ultimately means that all the dimensions in the 2D sketch are controlled by the material thickness. I made a sketch in FreeCAD and set up a spreadsheet inside the part file. In the spreadsheet I defined two main parameters that I use to control the design; the material thickness and the kerf. There I am editing the value of the kerf: I made two test pieces (see the image at the top of the page) and decided to make the hole in the middle a star instead of a square. That way you can assemble the pieces at a 45\u00b0 angle, which is necessary because the angle between every pin and pocket is 45\u00b0. Then I made a series of kerf tests, which was easy because I only needed to change one parameter. 
I wound up making the fit tighter than I usually do (kerf = 0.24 mm) because the fit is tighter when the pieces are assembled in-plane than when they are perpendicular to each other. The 90\u00b0 perpendicular fits were always too loose and I didn't understand why. Kerf tests Then I thought about the way that the kerf tapers, so that more material is removed at the bottom of the material than at the top surface. This is because the laser has only one focus point, which is set at the top surface of the material. Below that point, the laser beam diverges, causing the sides of the pieces to not be completely perpendicular to the surface of the sheet. My instructor \u00de\u00f3rarinn said that there isn't really anything we can do about that, so I tried setting the focus in the middle of the material, by focusing on a 2 mm thick sheet and then cutting a 4 mm thick sheet. I was hoping that the cut would have more of an hourglass shape than a simple taper, but it didn't work out that way. Changing the focus didn't make that much of a difference, but I ended up cutting all the pieces with it set in the middle of the material. Here's a full plate of construction pieces, patterned by hand in Inkscape: I used the Epilog Helix Mini 24 laser cutter to cut my construction kit. I used 5% speed, 100% power and 5000 Hz laser frequency. The Epilog Helix Mini 24 hard at work. The construction kit was very well received by my family. Here's Hj\u00f6rtur's (1 year old) hand touching something that my wife A\u00f0albj\u00f6rg made: After an evening of play, these are the resulting shapes. Wall-E, a drone, an axle with wheels, a cube, a coaster, a giraffe, a magic wand, an I-beam, and a tool to measure the curve of your spine. It works really well, it might be a handy tool for physiotherapists. I'm holding up Wall-E, which my older son Ernir (4 years old) made. This could be a template to measure spine curvature. 
Here are the design files: Download spreadsheet-driven FreeCAD design Download DXF file (one piece) Download SVG cutting file (462 pieces) Download PDF cutting file (462 pieces) Circuit cut with vinyl cutter I wanted to cut a copper sheet and make a circuit in the vinyl cutter. This hasn't been done before at my lab. My instructor \u00de\u00f3rarinn led me through the ins and outs of the vinyl cutter. He disassembled the knife to show me the tiny bearing that needs to be cleaned and lubricated when it jams: Then he explained the way to adjust the knife. You start by retracting the knife fully and then cutting manually into the material. If nothing happens, you extend the knife a little bit and cut again. You want to cut fully through the material and scratch the backing, but you don't want to cut through the backing. Then you would damage the rubber that is underneath, and that's part of the machine. You cut tabs like you see below and then bend the material and see if they come loose. When it looks like you're getting close, you start paying attention to the ticks on the knife. A course adjustment is two ticks, a fine adjustment is one tick. If the material comes loose when you bend it, but you're not cutting deep into the backing, you're golden: \u00de\u00f3rarinn suggested a methodology of testing to get good cuts in the copper. First I would adjust the knife as discussed above. The next step would be to vary the cutting force and the speed until I get a good result. However, after he adjusted the blade and the force for the regular vinyl, I made a test and it cut quite well through the copper! So the systematic testing ended up being just three tests. The first test didn't work out because the copper was crumpled on top of the backing. The second test I cut with 90 grams of force and it was almost there. The third test I cut with 120 grams of force and it looked good. The Roland CAMM-1 Servo GX-24 vinyl cutter. 
I used a force setting of 120 grams to cut the copper sheet. Here's my first try at weeding copper sheet: It worked, but could be better. Neil recommended sticking everything to the final surface and then weeding. I will definitely try that next time. But this time I weeded first and then glued the copper pads to the laser cut acrylic. I forgot to add holes for pins, so I drilled them afterwards: When I had my students make sensors like these, I added the holes to the laser cutting file. The only component on the board is a 10k resistor. I grabbed a 10k resistor and soldered it to the pads. I'm surprised that the tiny pads survived my rough handling. Then I measured the resistance from one pin to the touch sensitive pad, to check if the solder connections were OK: The soldering was fine, and the next step was to open a capacitive touch sensor example sketch in the Arduino IDE and connect my new sensor to a SparkFun RedBoard: Note I used ffmpeg -i input_video -vcodec libx264 -crf 25 -preset medium -vf scale=-2:1080 -acodec libmp3lame -q:a 4 -ar 48000 -ac 2 output_video.mp4 to compress the video from my phone. Fab Lab Kannai put this into a tutorial . It works! Here's the Arduino sketch, which I modified a little bit (just commented out what I didn't need): #include <CapacitiveSensor.h> /* * CapitiveSense Library Demo Sketch * Paul Badger 2008 * Uses a high value resistor e.g. 10M between send pin and receive pin * Resistor effects sensitivity, experiment with values, 50K - 50M. Larger resistor values yield larger sensor values. 
* Receive pin is the sensor pin - try different amounts of foil/metal on this pin */ //CapacitiveSensor cs_4_2 = CapacitiveSensor(4,2); // 10M resistor between pins 4 & 2, pin 2 is sensor pin, add a wire and or foil if desired CapacitiveSensor cs_4_6 = CapacitiveSensor ( 4 , 6 ); // 10M resistor between pins 4 & 6, pin 6 is sensor pin, add a wire and or foil //CapacitiveSensor cs_4_8 = CapacitiveSensor(4,8); // 10M resistor between pins 4 & 8, pin 8 is sensor pin, add a wire and or foil int LEDpin = 13 ; void setup () { // cs_4_2.set_CS_AutocaL_Millis(0xFFFFFFFF); // turn off autocalibrate on channel 1 - just as an example Serial . begin ( 9600 ); } void loop () { long start = millis (); // long total1 = cs_4_2.capacitiveSensor(30); long total2 = cs_4_6 . capacitiveSensor ( 30 ); // long total3 = cs_4_8.capacitiveSensor(30); // Serial.print(millis() - start); // check on performance in milliseconds // Serial.print(\"\\t\"); // tab character for debug windown spacing // Serial.print(total1); // print sensor output 1 Serial . print ( \" \\t \" ); Serial . println ( total2 ); // print sensor output 2 // Serial.print(\"\\t\"); // Serial.println(total3); // print sensor output 3 delay ( 10 ); // arbitrary delay to limit data to serial port // if (total3 > 40) // { // digitalWrite(LEDpin, HIGH); // } // else // { // digitalWrite(LEDpin, LOW); // } } Here are the design files: Download sensor vinyl cutting file Download outline laser cutting file Rubber stamp For the rubber stamp engraving, I used this reference . I was able to cut through the rubber at 5% speed and 100% power with out 40W Epilog Helix laser cutter: The engraving test that looked cleanest to me was at 30% speed and 100% power. I then engraved the Fab Lab \u00cdsafj\u00f6r\u00f0ur logo with two such passes and cut out the outline: As you can see, this makes for an awful stamp. I both forgot to mirror the logo and invert it, to make it stick out. 
I haven't had time to make a proper stamp yet, but the tests look promising. Here are the design files: Download rubber test file Download failed rubber stamp logo Download failed rubber stamp outline Kerf test The missing width when I've laser cut these ten lines is exactly 2 mm. That means that the laser's kerf (or the diameter of the laser point) is \\[\\frac{2mm}{10}=\\underline{0.2mm}\\] So when I design a press-fit joint, I need to offset all lines outward by 0.1 mm. Download kerf test Raster test I used the same raster test as I did with the rubber stamp. I set the laser power to 100% and the frequency to the maximum 5000 Hz. Then I enabled Color Mapping in the Epilog Laser settings within the Print dialog in the PDF viewer. Red is 60% speed, green is 50%, blue is 40%, yellow is 30%, magenta is 20% and cyan is 10%. That goes for both rastering (top row) and cutting (bottom row). Here's how the 4 mm MDF reacted to these settings: To get a clean raster, the speed must be above 30%. To cut through the material, the speed must be 10% or slower. I usually use 5% speed to make sure that the laser cuts through. Download raster test file .md-content__button { display: none; }","title":"3. Computer-Controlled Cutting"},{"location":"assignments/week03.html#computer-controlled-cutting","text":"","title":"Computer-Controlled Cutting   "},{"location":"assignments/week03.html#parametric-construction-kit","text":"I wanted to make a minimal parametric construction kit that was made up of only one piece. You can see the design taking shape in the hand-drawn sketches below as I was thinking about the design specifications. When I had decided on the specs, the design was fully defined. The piece can have no other shape. I wanted four pins and four pockets, so that defined the shape of the piece. The plan was to have the dimensions of the pins and pockets equal on all three axes, so that the pieces can be assembled every which way. 
This ultimately means that all the dimensions in the 2D sketch are controlled by the material thickness. I made a sketch in FreeCAD and set up a spreadsheet inside the part file. In the spreadsheet I defined two main parameters that I use to control the design; the material thickness and the kerf. There I am editing the value of the kerf: I made two test pieces (see the image at the top of the page) and decided to make the hole in the middle a star instead of a square. That way you can assemble the pieces at a 45\u00b0 angle, which is necessary because the angle between every pin and pocket is 45\u00b0. Then I made a series of kerf tests, which was easy because I only needed to change one parameter. I wound up making the fit tighter than I usually do (kerf = 0.24 mm) because the fit is tighter when the pieces are assembled in-plane than when they are perpendicular to each other. The 90\u00b0 perpendicular fits were always too loose and I didn't understand why. Kerf tests Then I thought about the way that the kerf tapers, so that more material is removed at the bottom of the material than at the top surface. This is because the laser has only one focus point, which is set at the top surface of the material. Below that point, the laser beam diverges, causing the sides of the pieces to not be completely perpendicular to the surface of the sheet. My instructor \u00de\u00f3rarinn said that there isn't really anything we can do about that, so I tried setting the focus in the middle of the material, by focusing on a 2 mm thick sheet and then cutting a 4 mm thick sheet. I was hoping that the cut would have more of an hourglass shape than a simple taper, but it didn't work out that way. Changing the focus didn't make that much of a difference, but I ended up cutting all the pieces with it set in the middle of the material. Here's a full plate of construction pieces, patterned by hand in Inkscape: I used the Epilog Helix Mini 24 laser cutter to cut my construction kit. 
I used 5% speed, 100% power and 5000 Hz laser frequency. The Epilog Helix Mini 24 hard at work. The construction kit was very well received by my family. Here's Hj\u00f6rtur's (1 year old) hand touching something that my wife A\u00f0albj\u00f6rg made: After an evening of play, these are the resulting shapes. Wall-E, a drone, an axle with wheels, a cube, a coaster, a giraffe, a magic wand, an I-beam, and a tool to measure the curve of your spine. It works really well, it might be a handy tool for physiotherapists. I'm holding up Wall-E, which my older son Ernir (4 years old) made. This could be a template to measure spine curvature. Here are the design files: Download spreadsheet-driven FreeCAD design Download DXF file (one piece) Download SVG cutting file (462 pieces) Download PDF cutting file (462 pieces)","title":"Parametric construction kit"},{"location":"assignments/week03.html#circuit-cut-with-vinyl-cutter","text":"I wanted to cut a copper sheet and make a circuit in the vinyl cutter. This hasn't been done before at my lab. My instructor \u00de\u00f3rarinn led me through the ins and outs of the vinyl cutter. He disassembled the knife to show me the tiny bearing that needs to be cleaned and lubricated when it jams: Then he explained the way to adjust the knife. You start by retracting the knife fully and then cutting manually into the material. If nothing happens, you extend the knife a little bit and cut again. You want to cut fully through the material and scratch the backing, but you don't want to cut through the backing. Then you would damage the rubber that is underneath, and that's part of the machine. You cut tabs like you see below and then bend the material and see if they come loose. When it looks like you're getting close, you start paying attention to the ticks on the knife. A course adjustment is two ticks, a fine adjustment is one tick. 
If the material comes loose when you bend it, but you're not cutting deep into the backing, you're golden: \u00de\u00f3rarinn suggested a methodology of testing to get good cuts in the copper. First I would adjust the knife as discussed above. The next step would be to vary the cutting force and the speed until I get a good result. However, after he adjusted the blade and the force for the regular vinyl, I made a test and it cut quite well through the copper! So the systematic testing ended up being just three tests. The first test didn't work out because the copper was crumpled on top of the backing. The second test I cut with 90 grams of force and it was almost there. The third test I cut with 120 grams of force and it looked good. The Roland CAMM-1 Servo GX-24 vinyl cutter. I used a force setting of 120 grams to cut the copper sheet. Here's my first try at weeding copper sheet: It worked, but could be better. Neil recommended sticking everything to the final surface and then weeding. I will definitely try that next time. But this time I weeded first and then glued the copper pads to the laser cut acrylic. I forgot to add holes for pins, so I drilled them afterwards: When I had my students make sensors like these, I added the holes to the laser cutting file. The only component on the board is a 10k resistor. I grabbed a 10k resistor and soldered it to the pads. I'm surprised that the tiny pads survived my rough handling. Then I measured the resistance from one pin to the touch sensitive pad, to check if the solder connections were OK: The soldering was fine, and the next step was to open a capacitive touch sensor example sketch in the Arduino IDE and connect my new sensor to a SparkFun RedBoard: Note I used ffmpeg -i input_video -vcodec libx264 -crf 25 -preset medium -vf scale=-2:1080 -acodec libmp3lame -q:a 4 -ar 48000 -ac 2 output_video.mp4 to compress the video from my phone. Fab Lab Kannai put this into a tutorial . It works! 
Here's the Arduino sketch, which I modified a little bit (just commented out what I didn't need): #include <CapacitiveSensor.h> /* * CapitiveSense Library Demo Sketch * Paul Badger 2008 * Uses a high value resistor e.g. 10M between send pin and receive pin * Resistor effects sensitivity, experiment with values, 50K - 50M. Larger resistor values yield larger sensor values. * Receive pin is the sensor pin - try different amounts of foil/metal on this pin */ //CapacitiveSensor cs_4_2 = CapacitiveSensor(4,2); // 10M resistor between pins 4 & 2, pin 2 is sensor pin, add a wire and or foil if desired CapacitiveSensor cs_4_6 = CapacitiveSensor ( 4 , 6 ); // 10M resistor between pins 4 & 6, pin 6 is sensor pin, add a wire and or foil //CapacitiveSensor cs_4_8 = CapacitiveSensor(4,8); // 10M resistor between pins 4 & 8, pin 8 is sensor pin, add a wire and or foil int LEDpin = 13 ; void setup () { // cs_4_2.set_CS_AutocaL_Millis(0xFFFFFFFF); // turn off autocalibrate on channel 1 - just as an example Serial . begin ( 9600 ); } void loop () { long start = millis (); // long total1 = cs_4_2.capacitiveSensor(30); long total2 = cs_4_6 . capacitiveSensor ( 30 ); // long total3 = cs_4_8.capacitiveSensor(30); // Serial.print(millis() - start); // check on performance in milliseconds // Serial.print(\"\\t\"); // tab character for debug windown spacing // Serial.print(total1); // print sensor output 1 Serial . print ( \" \\t \" ); Serial . println ( total2 ); // print sensor output 2 // Serial.print(\"\\t\"); // Serial.println(total3); // print sensor output 3 delay ( 10 ); // arbitrary delay to limit data to serial port // if (total3 > 40) // { // digitalWrite(LEDpin, HIGH); // } // else // { // digitalWrite(LEDpin, LOW); // } } Here are the design files: Download sensor vinyl cutting file Download outline laser cutting file","title":"Circuit cut with vinyl cutter"},{"location":"assignments/week03.html#rubber-stamp","text":"For the rubber stamp engraving, I used this reference . 
I was able to cut through the rubber at 5% speed and 100% power with out 40W Epilog Helix laser cutter: The engraving test that looked cleanest to me was at 30% speed and 100% power. I then engraved the Fab Lab \u00cdsafj\u00f6r\u00f0ur logo with two such passes and cut out the outline: As you can see, this makes for an awful stamp. I both forgot to mirror the logo and invert it, to make it stick out. I haven't had time to make a proper stamp yet, but the tests look promising. Here are the design files: Download rubber test file Download failed rubber stamp logo Download failed rubber stamp outline","title":"Rubber stamp"},{"location":"assignments/week03.html#kerf-test","text":"The missing width when I've laser cut these ten lines is exactly 2 mm. That means that the laser's kerf (or the diameter of the laser point) is \\[\\frac{2mm}{10}=\\underline{0.2mm}\\] So when I design a press-fit joint, I need to offset all lines outward by 0.1 mm. Download kerf test","title":"Kerf test"},{"location":"assignments/week03.html#raster-test","text":"I used the same raster test as I did with the rubber stamp. I set the laser power to 100% and the frequency to the maximum 5000 Hz. Then I enabled Color Mapping in the Epilog Laser settings within the Print dialog in the PDF viewer. Red is 60% speed, green is 50%, blue is 40%, yellow is 30%, magenta is 20% and cyan is 10%. That goes for both rastering (top row) and cutting (bottom row). Here's how the 4 mm MDF reacted to these settings: To get a clean raster, the speed must be above 30%. To cut through the material, the speed must be 10% or slower. I usually use 5% speed to make sure that the laser cuts through. Download raster test file .md-content__button { display: none; }","title":"Raster test"},{"location":"assignments/week04.html","text":"Embedded Programming The RP2040 datasheet I read the Raspberry Pi RP2040 datasheet loosely. 
This is one of the more interesting microcontrollers to me, because of the flexible Programmable Input/Output (PIO), two fast cores and you can program it in the Arduino IDE or in the friendly Python language. The datasheet also looks friendlier than datasheets for other microcontrollers. Still, that may only be the graphic design, because the actual content is cryptic. I understood like 0.5% of what I read. But I did pick up some interesting tidbits. The RP2040 has a built-in Real Time Clock. That would be useful for my wake-up mask. It has a dormant state which uses almost no power and it can be woken up by the RTC. That also sounds good for the wake-up mask. But in that case, the RTC needs to use an external clock source, which can be as slow as 1 Hz. Hold on, the RTC needs an external crystal. That's good to know. I thought you didn't need to add a crystal, but the only truly internally generated clock is the Ring Oscillator. Its speed varies with process, voltage and temperature, so it can't be used for RTC, USB communication or analog to digital conversion. When reading the RP2040 datasheet and testing the Xiao RP2040, I wondered: Does the tiny Xiao board have a crystal? I don't see one on the board. Is it underneath the metal cover with the RP2040? If it doesn't have an external oscillator, then the Real Time Clock doesn't work and I can't use it in my wake-up mask. I looked around and found the schematic for the Xiao RP2040 and there is a crystal and Flash memory. Good. They must be underneath the cover. So there is a crystal. But maybe the internal RTC in the RP2040 isn't good enough and I need to use something like the DS3231 chip . Update: My instructor \u00de\u00f3rarinn showed me a picture of what's underneath the metal cover. The picture was actually on the same page as the schematic that I had found, but I hadn't looked at the whole page. 
So here it is, and you can see the crystal marked with 12.000: I watched a video that stated that computers are horrible at division. So I was glad to see that the Raspberry Pi Foundation decided to implement a hardware division unit in the RP2040. I wondered why there was no hardware multiplier until I got to the section about the Arm Cortex M0+ cores inside the RP2040. The Arm specification has hardware multiplication built in. I watched another video that shows how you can easily overclock the Raspberry Pi Pico. Overclocking has always sounded to me as if you're going to overheat the device and damage it, or make it unstable, but the datasheet says that overclocking processors in general is usually fine. In the video, a maximum speed of 250 MHz is recommended. I was interested in the example programs provided under the Programmer's Model headings in the datasheet but I have no idea where to put them or how to run them. There are also lots of tables with registers and commands and stuff like that but I can make neither heads nor tails of them. What are they? What would I do with them? The mysteries continue. The RP2040 chip has 36 General Purpose Input/Output pins. That's quite a lot. Typically, a few of those pins are connected to an external flash memory chip, as in the Raspberry Pi Pico board. The Pi Pico board has 26 user-accessible pins. Three of them can be used for reading analog signals and converting them to digital numbers. The ADCs are officially 12 bit but the effective resolution is I think something like 10 bits because of an error in the chip design. The RP2040 has plenty of communication interfaces (SPI, UART, I2C, USB), and if you need something else, you can make it yourself with the Programmable IO blocks. It has eight two channel PWM generators (they're called slices). So can it generate 8 PWM signals, or 16? The PIO machines can always see the states of all pins. That's interesting. 
I think I remember reading that the DMA channels share a similar view. My Master's thesis advisor said that when programming in C, you get much more access to all the computer's devices and peripherals. When the University stopped paying the license for his vibration meter, he wrote a driver for it in C. That sounds like wizardry to me, and more than a little fascinating. But the C code examples in the RP2040 datasheet don't look fun to me. PIO This is the most interesting part, the Programmable IO. There are two PIO blocks in the RP2040 and each has four state machines. That means that you have eight simple, deterministic and precisely timed cores that are specialized for input and output. Each state machine has two 32 bit shift registers. Jakob Einar, a frequent guest at Fab Lab \u00cdsafj\u00f6r\u00f0ur, made a cnc plotter using old DVD drives. He said that a shift register enables a slow Arduino Uno to control all the steppers in sync. I'm curious about how a shift register works. There are two PIO blocks with four state machines each. Each state machine has: Two 32-bit shift registers (I need to find out what they do) Two 32-bit scratch registers (you can think of them as variables, they're called x and y) 4x32-bit bus FIFO in each direction or 8x32 in a single direction (data flows in and out using these and gets stored in the two variables x and y) Fractional clock divider (if you want the PIO to run slower than the main clock speed) DMA interface (to get lots of data from memory without using the processor, or put data into memory) IRQ flag set/clear/status (I think this is to alert the main processor that the PIO has finished an operation) The state machine has only nine instructions: JMP , WAIT , IN , OUT , PUSH , PULL , MOV , IRQ AND SET . Each instruction takes exactly one cycle. It's interesting to try programming at this level. I like the fact that the first three bits of each instruction is like this: JMP : 001, WAIT : 010 and so on. 
I understand that labeling system! Precise timing is what microcontrollers are good at! You can get PIO programs from the Raspberry Pi Pico PIO library, you can write them yourself or (and this is the most interesting case) you can generate them programmatically. I wouldn't know how to do that, though. All the supplied code examples are written in C. This may be a good introduction. I'm putting this here for myself to discover later. Here's some more stuff. I did find one example of using DMA in MicroPython code. The iosoft blog says that MicroPython doesn't have built-in functions to support DMA and doesn't provide a simple way to access the ADC, DMA and I/O pin registers. But there is a way to define these registers using the obscure uctypes . And then you need to be familiar with the RP2040 datasheet. Here someone has written some code based on the iosoft blog. This should be easy to try out, because it's Python code. Here's a resource with good pictures that goes into PIO + DMA. Most instructions are executed from the instruction memory, but there are a few other sources. The most versatile of these is the OUT EXEC instruction. You can use this to embed instructions in the data stream passing through the FIFO. I didn't understand the Wikipedia entry on shift registers. The Sparkfun article says that they're used to add more I/O pins to a microcontrollers. But that's not what's happening in the PIO. Apparently, you load one bit into the shift register at a time (a 1 or a 0). When a new bit enters, all the bits inside the shift register are shifted to make room for it. Then you can use the latch pin to output all the bits at the same time from the shift register. So you're turning a serial port into a parallel port. I guess this is useful for synchronized CNC movements, but how does it work inside the PIO block in the RP2040? The output of the PIO assembler is shown, and it consists of hexadecimal numbers. They look scary and incomprehensible. 
I noticed that they all have an x in them, so I asked Google why hexadecimal numbers all start with 0x. Turns out it's just to let the parser know that it's a hexadecimal base number instead of other bases. I've seen a lot of definitions and explanations, but what I really need is an example that I can run. I still don't understand how the data flows and how the operations are carried out. To see what's going on inside the chip, I probably need something like PicoReg . It's a debugger for the Raspberry Pi Pico. The advantage is that it's written entirely in Python I can set it up on a Raspberry Pi. I can wrap my head around that. I'd like to try to implement a PIO+DMA stepper controller with acceleration ramping. But I likely won't have time for that this week. V. Hunter Adams at Cornell has implemented it , but to use it you need to set up a toolchain for C. This forum post is also interesting. cleverca22 's comment on this post may also hold clues. Seemingly unrelated, but there may also be some clues in the servo easing algorithm. There are stepper control PIO programs here and here . But they don't implement ramping. And again, you need to use the C/C++ SDK. The RP2040 datasheet says that if you want to get started with PIO, a walkthrough of writing your first PIO program is in the Pico C/C++ SDK. It also covers using PIO with DMA. But I want to use Python. Or at least I want to be able to make a library for Python. Is it possible to do that in C/C++? Yes. But if you build an external C module for MicroPython, you have to build your own MicroPython firmware to put on the RP2040. There's another possibility , which I like better: An alternative approach is to use Native machine code in .mpy files which allows writing custom C code that is placed in a .mpy file, which can be imported dynamically in to a running MicroPython system without the need to recompile the main firmware. 
I flipped quickly through all the communications protocols but read PWM more carefully. If I will use brushless motors in my robot arm, I need to get to know Pulse Width Modulation. It's interesting how flexible the pin mapping on the RP2040 is, the PWM slices can use any of the GPIO pins, and so can PIO. I actually understood a part of the description of how a PWM slice works. It's a 16-bit counter that the wraps back to zero. Wrap is a command in pioasm that enables you to jump back to the beginning without using a JMP command, and it takes 0 cycles. I'm beginning to understand a little bit, even if I can't use these things yet. I guess I could DMA to push a sine wave from a lookup table to three PWM outputs to control a three-phase brushless motor. Then the main processors would be free to do other tasks. This arbitrary wave generator using PIO and DMA may be worth taking a look at when I have some time. There's a lot going on inside this $1 chip. It's like a whole city, complete with different neighborhoods connected together with highways and smaller roads to move data between places. All roads lead to the C/C++ SDK, it seems. Here are community libraries for the SDK. There are also lots of examples in the Raspberry Pi GitHub repo. I will have to jump in at some point. One question remains: What is the OSR? There is a bit of programmer humor in the datasheet, on page 359 it says that I2C is an ubiquitous serial bus first described in the Dead Sea Scrolls, and later used by Philips Semiconductor. On page 364 it is conjectured that the PIO can run DOOM with a high enough clock speed. On page 365 it says that a full 32-bit addition takes the PIO only around one minute at 125 MHz. What? So using the PIO for mathematical operations takes hundreds of millions of cycles? DMA Direct Memory Access is the other interesting feature in the RP2040. It's a memory controller that can copy data from one place to another very fast without the processor's intervention. 
Throughout the RP2040 datasheet there are mentions that this and that part of the chip has a DMA interface. It looks like it's important to learn to use DMA if you want to make things run fast. Could DMA be used to feed a PIO state machine with acceleration ramps for a stepper? I would like to learn a little bit about how DMA works and how to set it up, but it seems that it's not possible in MicroPython. The Raspberry Pi Pico C/C++ SDK instructions mention that you can use DMA by including a library called hardware_dma. Setting up a C/C++ toolchain sounds intimidating, but maybe I have to do it if I want to try using DMA. I think DMA programming is too complicated to get into for now. ADC The Analog to Digital Converter takes 96 clock cycles to make a 12-bit measurement. That's good to know. The RP2040 ADC has a few errors, most notably that the quantization error looks like a sawtooth. Also, because the wrong size of capacitor was used in one part of the ADC, there are big spikes in differential non-linearity. I won't pretend to know what that is, but it means that there will be spikes in the measured values in four places. The scaling factor for the internal temperature sensor is specified here in the datasheet. You can see it an example code in the Chip temperature section below. I looked at the Raspberry Pi Pico Python SDK and found it a bit thin. I also skimmed Getting started with Raspberry Pi Pico, which shows how to set up a C/C++ programming environment for the Pi Pico, and it looks extremely complicated. But that's where the juicy stuff is. Programming the Xiao RP2040 The Xiao RP2040 has a reset button, so it's more convenient to use in that respect than the Raspberry Pi Pico. Hello RP2040 I tried Neil Gershenfeld's Hello RP2040 Python program: I like having an RGB LED to play with. It could serve as a simple interface for the wake-up mask or the robot arm if I assign a different meaning to every color. 
Chip temperature I did this one with the Raspberry Pi Pico. Setup and programming is identical to the Xiao RP2040. I found a nice tutorial on using the temperature sensor which is built into the RP2040 chip. It's useful to make sure that the chip doesn't overheat, but it's also a good exercise in measuring an analog voltage. Since I've tried blinking the onboard LED already, I'm going to use that knowledge to modify this program to turn on the LED when the temperature crosses a threshold: from machine import ADC , Pin import time led = machine . Pin ( \"LED\" , machine . Pin . OUT ) adc = machine . ADC ( 4 ) while True : ADC_voltage = adc . read_u16 () * ( 3.3 / ( 65535 )) temperature = 27 - ( ADC_voltage - 0.706 ) / 0.001721 print ( \"Temperature: {} \u00b0C\" . format ( temperature )) if temperature > 26 : led . value ( 1 ) else : led . value ( 0 ) time . sleep_ms ( 100 ) I added the if statement and removed the temperature in Fahrenheit. You can see that you need to scale the analog measurement to get the temperature in degrees Celsius. I suspect that that every chip will give a slightly different value because of manufacturing variability. So it might be better to measure the temperature with a better sensor and put that value into the scaling factor. But since this sensor isn't meant to be super precise, we'll let it be. Interfacing with LCD screen I connected a classic 16x2 character LCD screen to the Raspberry Pi Pico, but it didn't work. I needed to use the 4-bit mode (where you connect the LCD directly to the microcontroller) because I didn't have an LCD driver chip. Aby Michael's Fab Academy site had a useful diagram with the LCD pins. The ElectronicWings diagram was even more useful. After some Googling I found that the screen needs 5V signals. The RP2040 is a 3.3V chip, so that's why I'm getting glitchy results. Its strange that it worked with a Pico in the tutorial that I used (click the link in the video description for a connection diagram). 
I'd like to try the small OLED screen that is in the Fab Lab inventory next. That one is 3-5V tolerant, and much smaller and versatile than the 16x2. And it only needs four pins. Look at that nest of wires above! The OLED will be a great improvement. Here are the files I used, you just open each of them in Thonny and save them to your Pico. The main.py file runs automatically when you power the Pico on. The other two are a library for interfacing with the LCD. Beware, this didn't work for me. Download main.py Download lcd_api.py Download gpio_lcd.py Interfacing with OLED screen I used Kevin McAleer's simple code example for the SSD1306 OLED screen. It just writes Test 1 to the screen. It was exhilarating to see the tiny letters light up on the screen on the first try! I then changed the text to something more useful, a prototype display for the Frankenstein MCU , which Fran and my instructor \u00de\u00f3rarinn are working on. The plan is to make a WiFi connected button in every Fab Lab and the staff push the button every day to show that the lab is active. The connections between labs appear on a spinning globe made in Three.js, which runs in the browser. Download oled.py Stepper control with RP2040 This one I also did with the Raspberry Pi Pico. from machine import Pin from time import sleep IN1 = Pin ( 2 , Pin . OUT ) IN2 = Pin ( 3 , Pin . OUT ) IN3 = Pin ( 4 , Pin . OUT ) IN4 = Pin ( 5 , Pin . OUT ) pins = [ IN1 , IN2 , IN3 , IN4 ] sequence = [[ 1 , 0 , 0 , 0 ],[ 0 , 1 , 0 , 0 ],[ 0 , 0 , 1 , 0 ],[ 0 , 0 , 0 , 1 ]] while True : for step in sequence : for i in range ( len ( pins )): pins [ i ] . value ( step [ i ]) sleep ( 0.01 ) Controlling the stepper is surprisingly simple. It has four phases, so you connect it to four pins on the Pico. Then you just alternate which pin is HIGH while the others are low. The stepper motor has 64 steps per rotation, but it also has a 1/64 gearing ratio, so in total it has 64 * 64 = 4096 steps per rotation. 
That's pretty good for a 3 dollar stepper! I bought a set of five 28-BYJ-48 steppers with ULN2003 drivers for $15. They're cheap and precise! But there are two drawbacks. They're quite weak. For a motor with 1:64 gearing, it's surprisingly easy to make it skip steps. Also, the gearbox introduces backlash which is much bigger than the step size. The step size is 0.09\u00b0 but the backlash seems to be a few degrees. Maybe it's possible to correct for the slop in software every time the motor changes direction. But that won't work 100% and definitely not with small motions. I wonder if these motors are a good fit for my robot arm. In the video above I changed the sleep value and then ran the program. First it was 0.1 s, then 0.01 s and finally 0.001 s. When I went below 0.01 s, the stepper stalled. It's fun to have status LEDs on all the phases. At slow speeds you can see how the phases are turned on and off. I want to have status LEDS on every output pin on every microcontroller board! It's a really handy debugging tool. I laser cut a press-fit acrylic arrow to see the motor's movement better. Since I have the 12V version of the 28BYJ-48 motors, I can run them on a 9V battery. So here's my first foray into battery-powered electronics. The Pico is still powered by a USB cable, though. I need to learn how to make a 5V regulator board, so that I can also power the Pico using the 9V battery. PIO stepper control I followed a tutorial and wrote a program that controls a stepper with PIO, without using the processor at all. 
I then modified it to include four PIO state machines that run the same program but are clocked at different frequencies: Here's the code: from machine import Pin from rp2 import PIO, StateMachine, asm_pio from time import sleep import sys @asm_pio(set_init=(PIO.OUT_LOW,) * 4) def prog(): wrap_target() set(pins, 8) [31] #8 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 4) [31] #4 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 2) [31] #2 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 1) [31] #1 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] wrap() sm0 = StateMachine(0, prog, freq=100000, set_base=Pin(2)) sm1 = StateMachine(1, prog, freq=50000, set_base=Pin(6)) sm2 = StateMachine(2, prog, freq=25000, set_base=Pin(10)) sm3 = StateMachine(3, prog, freq=12500, set_base=Pin(18)) sm0.active(1) sm1.active(1) sm2.active(1) sm3.active(1) sleep(5) sm0.active(0) sm1.active(0) sm2.active(0) sm3.active(0) sm0.exec(\"set(pins,0)\") sm1.exec(\"set(pins,0)\") sm2.exec(\"set(pins,0)\") sm3.exec(\"set(pins,0)\") nop() is no operation, and you can optionally add a delay after every command, like this: [1] . That was a delay of one clock cycle. I think 31 cycles is the maximum. Adding a Reset button The Raspberry Pi Pico has no reset button, but it resets if you connect the RUN pin to ground. So I just connected a jumper to the RUN pin on the breadboard and make the other end of the jumper touch the GND pin that is one pin over to reset the Pico. That's more convenient than unplugging the USB cable and plugging it in again. Overclocking the RP2040 I tried Chris DeHut's RP2040 overclocking video . He has lots of good stuff about the Pico. 
Here's his program which changes the clock speed a few times and measures the time it takes to do 100.000 sets of the calculations in the Do_Stuff function: ''' PICO default clock speed is 125 MHz Demo to show time to make a bunch of basic math calculations at varaious clock speeds that the PICO can handle ''' import machine import time import machine led_onboard = machine . Pin ( 25 , machine . Pin . OUT ) def Do_Stuff (): st = time . ticks_ms () Y = 0 while Y < 100000 : Y += 1 Z = 57 Z1 = Z + Y Z2 = Z - Y Z3 = Z * ( Z + Y ) #print(Y, Z1, Z2, Z3) led_onboard . value ( 0 ) #print(Y) et = time . ticks_ms () #print(et, st, et-st) return et - st cntr = 0 while cntr < 2 : #run whole test several times for observation cntr += 1 machine . freq ( 125000000 ) #set clock to 125 MHz x = machine . freq () t = Do_Stuff () print ( \" \\n @\" , x , \" time to run =\" , t , \"ms\" ) machine . freq ( 140000000 ) #set clock to 140 MHz x = machine . freq () t = Do_Stuff () print ( \" \\n @ \" , x , \" time to run =\" , t , \"ms\" ) machine . freq ( 200000000 ) #set clock to 200 MHz x = machine . freq () t = Do_Stuff () print ( \" \\n @ \" , x , \" time to run =\" , t , \"ms\" ) machine . freq ( 250000000 ) #set clock to 250 MHz x = machine . freq () t = Do_Stuff () print ( \" \\n @ \" , x , \" time to run =\" , t , \"ms\" ) machine . freq ( 125000000 ) #set clock to 125 MHz to clean things up print ( \" \\n All Done Folks!\" ) The loop runs a few times so that you can take the average of the time measurements. This is one set of measurements: @ 125000000 time to run = 1167 ms @ 140000000 time to run = 1042 ms @ 200000000 time to run = 729 ms @ 250000000 time to run = 583 ms That's a big difference by varying the clock speed, and the RP2040 datasheet says that overclocking is usually safe. I will keep this in mind when my programs are running slowly. Then I tried going a bit higher. It worked at 280 MHz, but at 290 or 300 MHz, I got no response from the microcontroller. 
So this is the best time: @ 280000000 time to run = 521 ms I think I will not go above 250 MHz. That's a lot, twice as fast as the standard RP2040 with a 12 MHz crystal, like in the Pico and the Xiao RP2040. There are instructions online on how to go up to 420 MHz and there is at least one commercial board that runs the RP2040 at 400 MHz. Custom performance test I modified the PIO program to control four steppers at different speeds, by modifying the clock dividers in each of the state machines. The state machines all run the same program. It works, and there should be no load on the main processor. The video looks the same as the four stepper video above. Now let's verify that there is no load on the main processor. I'll take Chris DeHut's 100 thousand calculation routine and put it into the PIO stepper program. Here are the results while driving four steppers at different speeds at the same time: @ 125000000 time to run = 1167 ms @ 140000000 time to run = 1042 ms @ 200000000 time to run = 729 ms @ 250000000 time to run = 584 ms Those are the same times as the first case, which had no steppers. Wow! I then increased the number of calculation loops to 300 thousand, so that we can see what happens to the steppers as we increase the main clock frequency from 125 MHz to 140, 200 and finally 250 MHz. As you can see from the video below, the steppers speed up until the fastest stepper stalls when the clock speed goes up to 250 MHz. For comparison with the PIO routine, I also tried to mix the 100k calculation code with code where the processor controls four steppers at the same time, but i couldn't get those two things to happen at the same time. But I could probably run those things on core0 and core1 with good results. Let's try, using this tutorial to learn how threads work in Python: @ 125000000 time to run = 1181 ms @ 140000000 time to run = 1053 ms @ 200000000 time to run = 734 ms @ 250000000 time to run = 587 ms It worked! My first dual-core program! 
The steppers just kept on running on core1 after the calculations finished on core0. And the calculation times are good! They're just a few milliseconds longer the 4 stepper PIO + 100k calculation routine. Here's the code: import machine import time import machine from machine import Pin from rp2 import PIO, StateMachine, asm_pio from time import sleep import sys led_onboard = machine.Pin(25, machine.Pin.OUT) @asm_pio(set_init=(PIO.OUT_LOW,) * 4) def prog(): wrap_target() set(pins, 8) [31] #8 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 4) [31] #4 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 2) [31] #2 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 1) [31] #1 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] wrap() sm0 = StateMachine(0, prog, freq=50000, set_base=Pin(2)) sm1 = StateMachine(1, prog, freq=25000, set_base=Pin(6)) sm2 = StateMachine(2, prog, freq=12500, set_base=Pin(10)) sm3 = StateMachine(3, prog, freq=6250, set_base=Pin(18)) sm0.active(1) sm1.active(1) sm2.active(1) sm3.active(1) #sleep(5) def Do_Stuff(): st = time.ticks_ms() Y = 0 while Y < 300000:+\u00f0\u00f0\u00f0\u00f0 Y += 1 Z = 57 Z1 = Z + Y Z2 = Z - Y Z3 = Z * (Z + Y) #print(Y, Z1, Z2, Z3) led_onboard.value(0) #print(Y) et = time.ticks_ms() #print(et, st, et-st) return et-st cntr = 0 while cntr < 2: #run whole test several times for observation cntr += 1 machine.freq(125000000) #set clock to 125 MHz x = machine.freq() t = Do_Stuff() print(\"\\n@\", x, \" time to run =\", t, \"ms\") machine.freq(140000000) #set clock to 140 MHz x = machine.freq() t = Do_Stuff() print(\"\\n@ \", x, \" time to run =\", t, \"ms\") machine.freq(200000000) #set clock to 200 MHz x = machine.freq() t = Do_Stuff() print(\"\\n@ \", x, \" time to run =\", t, \"ms\") machine.freq(250000000) #set clock to 250 MHz x = machine.freq() t = Do_Stuff() print(\"\\n@ \", x, \" time to run =\", t, \"ms\") 
machine.freq(125000000) #set clock to 125 MHz to clean things up print(\"\\n All Done Folks!\") sm0.active(0) sm1.active(0) sm2.active(0) sm3.active(0) sm0.exec(\"set(pins,0)\") sm1.exec(\"set(pins,0)\") sm2.exec(\"set(pins,0)\") sm3.exec(\"set(pins,0)\") Xiao SAMD21 The SAMD21 runs at 48 MHz, which is considerably slower than the RP2040. But it's easy to solder. I'll probably use it because of that. It also has a DAC output. For hobbyists, the SAMD21 can only be programmed in the Arduino IDE and CircuitPython. For professionals, you can use Microchip Studio or set up your own toolchain. So I set up the board in the Arduino IDE the way you would set up any new board: I added the proper link into \"Additional Boards Manager URLs\" in Preferences, and then I could find Seeed SAMD21 Boards in the Boards Manager and install them. It's as easy as that, and then selecting the right board (Seeeduino Xiao). The setup is covered in this tutorial . I tried connecting it to the Arduino IDE but it didn't work. Xiao ESP32 C3 The ESP32 C3 runs at 160 MHz, which is fast, but it's not possible to overclock it. But the most amazing thing is that this tiny microcontroller has WiFi and Bluetooth built in! This is my first RISC-V chip. As Neil says, RISC-V will probably take over the world. It's an open source architecture, so unlike the ARM chips that are found in microcontrollers, billions of smartphones and all the way up to supercomputers, manufacturers don't have to pay huge licensing fees to make RISC-V chips. I tried connecting it to the Arduino IDE but it didn't work. ESP-32 CAM ESP-32 CAM is a 10 dollar WiFi camera! I had to have one and try it out. It might play well with my robot arm. I followed this tutorial to set up the ESP32-CAM board. To install the ESP32 boards in the Arduino IDE, I followed that tutorial . Then I tested the board with this tutorial . I uncommented the AI-Thinker CAM definition in the setup and input my WiFi credentials in the sketch. 
When the Arduino IDE had uploaded the code, the following message was left hanging: Leaving... Hard resetting via RTS pin... But then I checked and saw that above it, the status bar said \"Done uploading\". I also found this explanation , which said that this isn't an error at all. First I saw nothing in the serial monitor. Then I removed an extra #define camera line. Still nothing. Then I switched to the 5G network, the same as my laptop is on. Then I saw a series of dots form .... but no IP address. But when I did in the opposite order from the tutorial (first pressed RESET, then removed the jumper) I got this message: ets Jul 29 2019 12:21:46 rst:0x1 (POWERON_RESET),boot:0x3 (DOWNLOAD_BOOT(UART0/UART1/SDIO_REI_REO_V2)) waiting for download ets Jul 29 2019 12:21:46 Something's happening! I removed the jumper and pressed reset again: rst:0x1 (POWERON_RESET),boot:0x13 (SPI_FAST_FLASH_BOOT) configsip: 0, SPIWP:0xee clk_drv:0x00,q_drv:0x00,d_drv:0x00,cs0_drv:0x00,hd_drv:0x00,wp_drv:0x00 mode:DIO, clock div:1 load:0x3fff0030,len:1344 load:0x40078000,len:13836 load:0x40080400,len:3608 entry 0x400805f0 . WiFi connected Camera Ready! Use 'http://192.168.1.32' to connect Success! After scrolling down and pressing \"Start stream\", I could see a live stream from the camera over WiFi to my browser! At QVGA resolution (320x240), the stream is smooth. At UXGA resolution (1600x1200), the stream stutters. SVGA (800x600) is the highest resolution where lag isn't very noticeable. That's pretty good for a microcontroller! I love it. And I find that in low light, greyscale looks a lot better than a color image. Note This time I used ffmpeg -i esp32-cam_test1.mp4 -vf scale=400:-2 -vcodec libx264 -an -crf 20 esp32-cam_test1_web.mp4 to compress the video and remove the audio as shown here . This video shows ways to control the image capture just by typing URLs into the browser. 
If you have a computer nearby at all times, you can also stream video from the ESP32-CAM to any browser in the world. And here's a guide to modifying the HTML code inside the Arduino code. It's not obvious how to do it, since the HTML has been converted to hex code in the Arduino sketch. I checked whether it's possible to overclock the ESP32, but 240 MHz seems to be the maximum clock speed. FPGA I bought an FPGA development board from Seeed Studio. It is the Runber board , which contains a Gowin FPGA. I took the plunge because it is the cheapest FPGA board I've found that still has a set of tutorials . I looked at some of the documentation and it looks intimidating. I applied for a software license on a Friday and got it in the beginning of the next week. I haven't had the time to set it up and go through a tutorial, though. .md-content__button { display: none; }","title":"4. Embedded Programming"},{"location":"assignments/week04.html#embedded-programming","text":"","title":"Embedded Programming   "},{"location":"assignments/week04.html#the-rp2040-datasheet","text":"I read the Raspberry Pi RP2040 datasheet loosely. This is one of the more interesting microcontrollers to me, because of the flexible Programmable Input/Output (PIO), two fast cores and you can program it in the Arduino IDE or in the friendly Python language. The datasheet also looks friendlier than datasheets for other microcontrollers. Still, that may only be the graphic design, because the actual content is cryptic. I understood like 0.5% of what I read. But I did pick up some interesting tidbits. The RP2040 has a built-in Real Time Clock. That would be useful for my wake-up mask. It has a dormant state which uses almost no power and it can be woken up by the RTC. That also sounds good for the wake-up mask. But in that case, the RTC needs to use an external clock source, which can be as slow as 1 Hz. Hold on, the RTC needs an external crystal. That's good to know. 
I thought you didn't need to add a crystal, but the only truly internally generated clock is the Ring Oscillator. Its speed varies with process, voltage and temperature, so it can't be used for RTC, USB communication or analog to digital conversion. When reading the RP2040 datasheet and testing the Xiao RP2040, I wondered: Does the tiny Xiao board have a crystal? I don't see one on the board. Is it underneath the metal cover with the RP2040? If it doesn't have an external oscillator, then the Real Time Clock doesn't work and I can't use it in my wake-up mask. I looked around and found the schematic for the Xiao RP2040 and there is a crystal and Flash memory. Good. They must be underneath the cover. So there is a crystal. But maybe the internal RTC in the RP2040 isn't good enough and I need to use something like the DS3231 chip . Update: My instructor \u00de\u00f3rarinn showed me a picture of what's underneath the metal cover. The picture was actually on the same page as the schematic that I had found, but I hadn't looked at the whole page. So here it is, and you can see the crystal marked with 12.000: I watched a video that stated that computers are horrible at division. So I was glad to see that the Raspberry Pi Foundation decided to implement a hardware division unit in the RP2040. I wondered why there was no hardware multiplier until I got to the section about the Arm Cortex M0+ cores inside the RP2040. The Arm specification has hardware multiplication built in. I watched another video that shows how you can easily overclock the Raspberry Pi Pico. Overclocking has always sounded to me as if you're going to overheat the device and damage it, or make it unstable, but the datasheet says that overclocking processors in general is usually fine. In the video, a maximum speed of 250 MHz is recommended. I was interested in the example programs provided under the Programmer's Model headings in the datasheet but I have no idea where to put them or how to run them. 
There are also lots of tables with registers and commands and stuff like that but I can make neither heads nor tails of them. What are they? What would I do with them? The mysteries continue. The RP2040 chip has 36 General Purpose Input/Output pins. That's quite a lot. Typically, a few of those pins are connected to an external flash memory chip, as in the Raspberry Pi Pico board. The Pi Pico board has 26 user-accessible pins. Three of them can be used for reading analog signals and converting them to digital numbers. The ADCs are officially 12 bit but the effective resolution is I think something like 10 bits because of an error in the chip design. The RP2040 has plenty of communication interfaces (SPI, UART, I2C, USB), and if you need something else, you can make it yourself with the Programmable IO blocks. It has eight two channel PWM generators (they're called slices). So can it generate 8 PWM signals, or 16? The PIO machines can always see the states of all pins. That's interesting. I think I remember reading that the DMA channels share a similar view. My Master's thesis advisor said that when programming in C, you get much more access to all the computer's devices and peripherals. When the University stopped paying the license for his vibration meter, he wrote a driver for it in C. That sounds like wizardry to me, and more than a little fascinating. But the C code examples in the RP2040 datasheet don't look fun to me.","title":"The RP2040 datasheet"},{"location":"assignments/week04.html#pio","text":"This is the most interesting part, the Programmable IO. There are two PIO blocks in the RP2040 and each has four state machines. That means that you have eight simple, deterministic and precisely timed cores that are specialized for input and output. Each state machine has two 32 bit shift registers. Jakob Einar, a frequent guest at Fab Lab \u00cdsafj\u00f6r\u00f0ur, made a cnc plotter using old DVD drives. 
He said that a shift register enables a slow Arduino Uno to control all the steppers in sync. I'm curious about how a shift register works. There are two PIO blocks with four state machines each. Each state machine has: Two 32-bit shift registers (I need to find out what they do) Two 32-bit scratch registers (you can think of them as variables, they're called x and y) 4x32-bit bus FIFO in each direction or 8x32 in a single direction (data flows in and out using these and gets stored in the two variables x and y) Fractional clock divider (if you want the PIO to run slower than the main clock speed) DMA interface (to get lots of data from memory without using the processor, or put data into memory) IRQ flag set/clear/status (I think this is to alert the main processor that the PIO has finished an operation) The state machine has only nine instructions: JMP , WAIT , IN , OUT , PUSH , PULL , MOV , IRQ AND SET . Each instruction takes exactly one cycle. It's interesting to try programming at this level. I like the fact that the first three bits of each instruction is like this: JMP : 001, WAIT : 010 and so on. I understand that labeling system! Precise timing is what microcontrollers are good at! You can get PIO programs from the Raspberry Pi Pico PIO library, you can write them yourself or (and this is the most interesting case) you can generate them programmatically. I wouldn't know how to do that, though. All the supplied code examples are written in C. This may be a good introduction. I'm putting this here for myself to discover later. Here's some more stuff. I did find one example of using DMA in MicroPython code. The iosoft blog says that MicroPython doesn't have built-in functions to support DMA and doesn't provide a simple way to access the ADC, DMA and I/O pin registers. But there is a way to define these registers using the obscure uctypes . And then you need to be familiar with the RP2040 datasheet. Here someone has written some code based on the iosoft blog. 
This should be easy to try out, because it's Python code. Here's a resource with good pictures that goes into PIO + DMA. Most instructions are executed from the instruction memory, but there are a few other sources. The most versatile of these is the OUT EXEC instruction. You can use this to embed instructions in the data stream passing through the FIFO. I didn't understand the Wikipedia entry on shift registers. The Sparkfun article says that they're used to add more I/O pins to a microcontrollers. But that's not what's happening in the PIO. Apparently, you load one bit into the shift register at a time (a 1 or a 0). When a new bit enters, all the bits inside the shift register are shifted to make room for it. Then you can use the latch pin to output all the bits at the same time from the shift register. So you're turning a serial port into a parallel port. I guess this is useful for synchronized CNC movements, but how does it work inside the PIO block in the RP2040? The output of the PIO assembler is shown, and it consists of hexadecimal numbers. They look scary and incomprehensible. I noticed that they all have an x in them, so I asked Google why hexadecimal numbers all start with 0x. Turns out it's just to let the parser know that it's a hexadecimal base number instead of other bases. I've seen a lot of definitions and explanations, but what I really need is an example that I can run. I still don't understand how the data flows and how the operations are carried out. To see what's going on inside the chip, I probably need something like PicoReg . It's a debugger for the Raspberry Pi Pico. The advantage is that it's written entirely in Python I can set it up on a Raspberry Pi. I can wrap my head around that. I'd like to try to implement a PIO+DMA stepper controller with acceleration ramping. But I likely won't have time for that this week. V. Hunter Adams at Cornell has implemented it , but to use it you need to set up a toolchain for C. 
This forum post is also interesting. cleverca22 's comment on this post may also hold clues. Seemingly unrelated, but there may also be some clues in the servo easing algorithm. There are stepper control PIO programs here and here . But they don't implement ramping. And again, you need to use the C/C++ SDK. The RP2040 datasheet says that if you want to get started with PIO, a walkthrough of writing your first PIO program is in the Pico C/C++ SDK. It also covers using PIO with DMA. But I want to use Python. Or at least I want to be able to make a library for Python. Is it possible to do that in C/C++? Yes. But if you build an external C module for MicroPython, you have to build your own MicroPython firmware to put on the RP2040. There's another possibility , which I like better: An alternative approach is to use Native machine code in .mpy files which allows writing custom C code that is placed in a .mpy file, which can be imported dynamically in to a running MicroPython system without the need to recompile the main firmware. I flipped quickly through all the communications protocols but read PWM more carefully. If I will use brushless motors in my robot arm, I need to get to know Pulse Width Modulation. It's interesting how flexible the pin mapping on the RP2040 is, the PWM slices can use any of the GPIO pins, and so can PIO. I actually understood a part of the description of how a PWM slice works. It's a 16-bit counter that the wraps back to zero. Wrap is a command in pioasm that enables you to jump back to the beginning without using a JMP command, and it takes 0 cycles. I'm beginning to understand a little bit, even if I can't use these things yet. I guess I could DMA to push a sine wave from a lookup table to three PWM outputs to control a three-phase brushless motor. Then the main processors would be free to do other tasks. This arbitrary wave generator using PIO and DMA may be worth taking a look at when I have some time. 
There's a lot going on inside this $1 chip. It's like a whole city, complete with different neighborhoods connected together with highways and smaller roads to move data between places. All roads lead to the C/C++ SDK, it seems. Here are community libraries for the SDK. There are also lots of examples in the Raspberry Pi GitHub repo. I will have to jump in at some point. One question remains: What is the OSR? There is a bit of programmer humor in the datasheet, on page 359 it says that I2C is an ubiquitous serial bus first described in the Dead Sea Scrolls, and later used by Philips Semiconductor. On page 364 it is conjectured that the PIO can run DOOM with a high enough clock speed. On page 365 it says that a full 32-bit addition takes the PIO only around one minute at 125 MHz. What? So using the PIO for mathematical operations takes hundreds of millions of cycles?","title":"PIO"},{"location":"assignments/week04.html#dma","text":"Direct Memory Access is the other interesting feature in the RP2040. It's a memory controller that can copy data from one place to another very fast without the processor's intervention. Throughout the RP2040 datasheet there are mentions that this and that part of the chip has a DMA interface. It looks like it's important to learn to use DMA if you want to make things run fast. Could DMA be used to feed a PIO state machine with acceleration ramps for a stepper? I would like to learn a little bit about how DMA works and how to set it up, but it seems that it's not possible in MicroPython. The Raspberry Pi Pico C/C++ SDK instructions mention that you can use DMA by including a library called hardware_dma. Setting up a C/C++ toolchain sounds intimidating, but maybe I have to do it if I want to try using DMA. I think DMA programming is too complicated to get into for now.","title":"DMA"},{"location":"assignments/week04.html#adc","text":"The Analog to Digital Converter takes 96 clock cycles to make a 12-bit measurement. That's good to know. 
The RP2040 ADC has a few errors, most notably that the quantization error looks like a sawtooth. Also, because the wrong size of capacitor was used in one part of the ADC, there are big spikes in differential non-linearity. I won't pretend to know what that is, but it means that there will be spikes in the measured values in four places. The scaling factor for the internal temperature sensor is specified here in the datasheet. You can see it an example code in the Chip temperature section below. I looked at the Raspberry Pi Pico Python SDK and found it a bit thin. I also skimmed Getting started with Raspberry Pi Pico, which shows how to set up a C/C++ programming environment for the Pi Pico, and it looks extremely complicated. But that's where the juicy stuff is.","title":"ADC"},{"location":"assignments/week04.html#programming-the-xiao-rp2040","text":"The Xiao RP2040 has a reset button, so it's more convenient to use in that respect than the Raspberry Pi Pico.","title":"Programming the Xiao RP2040"},{"location":"assignments/week04.html#hello-rp2040","text":"I tried Neil Gershenfeld's Hello RP2040 Python program: I like having an RGB LED to play with. It could serve as a simple interface for the wake-up mask or the robot arm if I assign a different meaning to every color.","title":"Hello RP2040"},{"location":"assignments/week04.html#chip-temperature","text":"I did this one with the Raspberry Pi Pico. Setup and programming is identical to the Xiao RP2040. I found a nice tutorial on using the temperature sensor which is built into the RP2040 chip. It's useful to make sure that the chip doesn't overheat, but it's also a good exercise in measuring an analog voltage. Since I've tried blinking the onboard LED already, I'm going to use that knowledge to modify this program to turn on the LED when the temperature crosses a threshold: from machine import ADC , Pin import time led = machine . Pin ( \"LED\" , machine . Pin . OUT ) adc = machine . 
ADC ( 4 ) while True : ADC_voltage = adc . read_u16 () * ( 3.3 / ( 65535 )) temperature = 27 - ( ADC_voltage - 0.706 ) / 0.001721 print ( \"Temperature: {} \u00b0C\" . format ( temperature )) if temperature > 26 : led . value ( 1 ) else : led . value ( 0 ) time . sleep_ms ( 100 ) I added the if statement and removed the temperature in Fahrenheit. You can see that you need to scale the analog measurement to get the temperature in degrees Celsius. I suspect that that every chip will give a slightly different value because of manufacturing variability. So it might be better to measure the temperature with a better sensor and put that value into the scaling factor. But since this sensor isn't meant to be super precise, we'll let it be.","title":"Chip temperature"},{"location":"assignments/week04.html#interfacing-with-lcd-screen","text":"I connected a classic 16x2 character LCD screen to the Raspberry Pi Pico, but it didn't work. I needed to use the 4-bit mode (where you connect the LCD directly to the microcontroller) because I didn't have an LCD driver chip. Aby Michael's Fab Academy site had a useful diagram with the LCD pins. The ElectronicWings diagram was even more useful. After some Googling I found that the screen needs 5V signals. The RP2040 is a 3.3V chip, so that's why I'm getting glitchy results. Its strange that it worked with a Pico in the tutorial that I used (click the link in the video description for a connection diagram). I'd like to try the small OLED screen that is in the Fab Lab inventory next. That one is 3-5V tolerant, and much smaller and versatile than the 16x2. And it only needs four pins. Look at that nest of wires above! The OLED will be a great improvement. Here are the files I used, you just open each of them in Thonny and save them to your Pico. The main.py file runs automatically when you power the Pico on. The other two are a library for interfacing with the LCD. Beware, this didn't work for me. 
Download main.py Download lcd_api.py Download gpio_lcd.py","title":"Interfacing with LCD screen"},{"location":"assignments/week04.html#interfacing-with-oled-screen","text":"I used Kevin McAleer's simple code example for the SSD1306 OLED screen. It just writes Test 1 to the screen. It was exhilarating to see the tiny letters light up on the screen on the first try! I then changed the text to something more useful, a prototype display for the Frankenstein MCU , which Fran and my instructor \u00de\u00f3rarinn are working on. The plan is to make a WiFi connected button in every Fab Lab and the staff push the button every day to show that the lab is active. The connections between labs appear on a spinning globe made in Three.js, which runs in the browser. Download oled.py","title":"Interfacing with OLED screen"},{"location":"assignments/week04.html#stepper-control-with-rp2040","text":"This one I also did with the Raspberry Pi Pico. from machine import Pin from time import sleep IN1 = Pin ( 2 , Pin . OUT ) IN2 = Pin ( 3 , Pin . OUT ) IN3 = Pin ( 4 , Pin . OUT ) IN4 = Pin ( 5 , Pin . OUT ) pins = [ IN1 , IN2 , IN3 , IN4 ] sequence = [[ 1 , 0 , 0 , 0 ],[ 0 , 1 , 0 , 0 ],[ 0 , 0 , 1 , 0 ],[ 0 , 0 , 0 , 1 ]] while True : for step in sequence : for i in range ( len ( pins )): pins [ i ] . value ( step [ i ]) sleep ( 0.01 ) Controlling the stepper is surprisingly simple. It has four phases, so you connect it to four pins on the Pico. Then you just alternate which pin is HIGH while the others are low. The stepper motor has 64 steps per rotation, but it also has a 1/64 gearing ratio, so in total it has 64 * 64 = 4096 steps per rotation. That's pretty good for a 3 dollar stepper! I bought a set of five 28-BYJ-48 steppers with ULN2003 drivers for $15. They're cheap and precise! But there are two drawbacks. They're quite weak. For a motor with 1:64 gearing, it's surprisingly easy to make it skip steps. Also, the gearbox introduces backlash which is much bigger than the step size. 
The step size is 0.09\u00b0 but the backlash seems to be a few degrees. Maybe it's possible to correct for the slop in software every time the motor changes direction. But that won't work 100% and definitely not with small motions. I wonder if these motors are a good fit for my robot arm. In the video above I changed the sleep value and then ran the program. First it was 0.1 s, then 0.01 s and finally 0.001 s. When I went below 0.01 s, the stepper stalled. It's fun to have status LEDs on all the phases. At slow speeds you can see how the phases are turned on and off. I want to have status LEDS on every output pin on every microcontroller board! It's a really handy debugging tool. I laser cut a press-fit acrylic arrow to see the motor's movement better. Since I have the 12V version of the 28BYJ-48 motors, I can run them on a 9V battery. So here's my first foray into battery-powered electronics. The Pico is still powered by a USB cable, though. I need to learn how to make a 5V regulator board, so that I can also power the Pico using the 9V battery.","title":"Stepper control with RP2040"},{"location":"assignments/week04.html#pio-stepper-control","text":"I followed a tutorial and wrote a program that controls a stepper with PIO, without using the processor at all. 
I then modified it to include four PIO state machines that run the same program but are clocked at different frequencies: Here's the code: from machine import Pin from rp2 import PIO, StateMachine, asm_pio from time import sleep import sys @asm_pio(set_init=(PIO.OUT_LOW,) * 4) def prog(): wrap_target() set(pins, 8) [31] #8 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 4) [31] #4 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 2) [31] #2 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 1) [31] #1 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] wrap() sm0 = StateMachine(0, prog, freq=100000, set_base=Pin(2)) sm1 = StateMachine(1, prog, freq=50000, set_base=Pin(6)) sm2 = StateMachine(2, prog, freq=25000, set_base=Pin(10)) sm3 = StateMachine(3, prog, freq=12500, set_base=Pin(18)) sm0.active(1) sm1.active(1) sm2.active(1) sm3.active(1) sleep(5) sm0.active(0) sm1.active(0) sm2.active(0) sm3.active(0) sm0.exec(\"set(pins,0)\") sm1.exec(\"set(pins,0)\") sm2.exec(\"set(pins,0)\") sm3.exec(\"set(pins,0)\") nop() is no operation, and you can optionally add a delay after every command, like this: [1] . That was a delay of one clock cycle. I think 31 cycles is the maximum.","title":"PIO stepper control"},{"location":"assignments/week04.html#adding-a-reset-button","text":"The Raspberry Pi Pico has no reset button, but it resets if you connect the RUN pin to ground. So I just connected a jumper to the RUN pin on the breadboard and make the other end of the jumper touch the GND pin that is one pin over to reset the Pico. That's more convenient than unplugging the USB cable and plugging it in again.","title":"Adding a Reset button"},{"location":"assignments/week04.html#overclocking-the-rp2040","text":"I tried Chris DeHut's RP2040 overclocking video . He has lots of good stuff about the Pico. 
Here's his program which changes the clock speed a few times and measures the time it takes to do 100.000 sets of the calculations in the Do_Stuff function: ''' PICO default clock speed is 125 MHz Demo to show time to make a bunch of basic math calculations at varaious clock speeds that the PICO can handle ''' import machine import time import machine led_onboard = machine . Pin ( 25 , machine . Pin . OUT ) def Do_Stuff (): st = time . ticks_ms () Y = 0 while Y < 100000 : Y += 1 Z = 57 Z1 = Z + Y Z2 = Z - Y Z3 = Z * ( Z + Y ) #print(Y, Z1, Z2, Z3) led_onboard . value ( 0 ) #print(Y) et = time . ticks_ms () #print(et, st, et-st) return et - st cntr = 0 while cntr < 2 : #run whole test several times for observation cntr += 1 machine . freq ( 125000000 ) #set clock to 125 MHz x = machine . freq () t = Do_Stuff () print ( \" \\n @\" , x , \" time to run =\" , t , \"ms\" ) machine . freq ( 140000000 ) #set clock to 140 MHz x = machine . freq () t = Do_Stuff () print ( \" \\n @ \" , x , \" time to run =\" , t , \"ms\" ) machine . freq ( 200000000 ) #set clock to 200 MHz x = machine . freq () t = Do_Stuff () print ( \" \\n @ \" , x , \" time to run =\" , t , \"ms\" ) machine . freq ( 250000000 ) #set clock to 250 MHz x = machine . freq () t = Do_Stuff () print ( \" \\n @ \" , x , \" time to run =\" , t , \"ms\" ) machine . freq ( 125000000 ) #set clock to 125 MHz to clean things up print ( \" \\n All Done Folks!\" ) The loop runs a few times so that you can take the average of the time measurements. This is one set of measurements: @ 125000000 time to run = 1167 ms @ 140000000 time to run = 1042 ms @ 200000000 time to run = 729 ms @ 250000000 time to run = 583 ms That's a big difference by varying the clock speed, and the RP2040 datasheet says that overclocking is usually safe. I will keep this in mind when my programs are running slowly. Then I tried going a bit higher. It worked at 280 MHz, but at 290 or 300 MHz, I got no response from the microcontroller. 
So this is the best time: @ 280000000 time to run = 521 ms I think I will not go above 250 MHz. That's a lot, twice as fast as the standard RP2040 with a 12 MHz crystal, like in the Pico and the Xiao RP2040. There are instructions online on how to go up to 420 MHz and there is at least one commercial board that runs the RP2040 at 400 MHz.","title":"Overclocking the RP2040"},{"location":"assignments/week04.html#custom-performance-test","text":"I modified the PIO program to control four steppers at different speeds, by modifying the clock dividers in each of the state machines. The state machines all run the same program. It works, and there should be no load on the main processor. The video looks the same as the four stepper video above. Now let's verify that there is no load on the main processor. I'll take Chris DeHut's 100 thousand calculation routine and put it into the PIO stepper program. Here are the results while driving four steppers at different speeds at the same time: @ 125000000 time to run = 1167 ms @ 140000000 time to run = 1042 ms @ 200000000 time to run = 729 ms @ 250000000 time to run = 584 ms Those are the same times as the first case, which had no steppers. Wow! I then increased the number of calculation loops to 300 thousand, so that we can see what happens to the steppers as we increase the main clock frequency from 125 MHz to 140, 200 and finally 250 MHz. As you can see from the video below, the steppers speed up until the fastest stepper stalls when the clock speed goes up to 250 MHz. For comparison with the PIO routine, I also tried to mix the 100k calculation code with code where the processor controls four steppers at the same time, but i couldn't get those two things to happen at the same time. But I could probably run those things on core0 and core1 with good results. 
Let's try, using this tutorial to learn how threads work in Python: @ 125000000 time to run = 1181 ms @ 140000000 time to run = 1053 ms @ 200000000 time to run = 734 ms @ 250000000 time to run = 587 ms It worked! My first dual-core program! The steppers just kept on running on core1 after the calculations finished on core0. And the calculation times are good! They're just a few milliseconds longer the 4 stepper PIO + 100k calculation routine. Here's the code: import machine import time import machine from machine import Pin from rp2 import PIO, StateMachine, asm_pio from time import sleep import sys led_onboard = machine.Pin(25, machine.Pin.OUT) @asm_pio(set_init=(PIO.OUT_LOW,) * 4) def prog(): wrap_target() set(pins, 8) [31] #8 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 4) [31] #4 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 2) [31] #2 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] set(pins, 1) [31] #1 nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] nop() [31] wrap() sm0 = StateMachine(0, prog, freq=50000, set_base=Pin(2)) sm1 = StateMachine(1, prog, freq=25000, set_base=Pin(6)) sm2 = StateMachine(2, prog, freq=12500, set_base=Pin(10)) sm3 = StateMachine(3, prog, freq=6250, set_base=Pin(18)) sm0.active(1) sm1.active(1) sm2.active(1) sm3.active(1) #sleep(5) def Do_Stuff(): st = time.ticks_ms() Y = 0 while Y < 300000:+\u00f0\u00f0\u00f0\u00f0 Y += 1 Z = 57 Z1 = Z + Y Z2 = Z - Y Z3 = Z * (Z + Y) #print(Y, Z1, Z2, Z3) led_onboard.value(0) #print(Y) et = time.ticks_ms() #print(et, st, et-st) return et-st cntr = 0 while cntr < 2: #run whole test several times for observation cntr += 1 machine.freq(125000000) #set clock to 125 MHz x = machine.freq() t = Do_Stuff() print(\"\\n@\", x, \" time to run =\", t, \"ms\") machine.freq(140000000) #set clock to 140 MHz x = machine.freq() t = Do_Stuff() print(\"\\n@ \", x, \" time to run =\", t, \"ms\") machine.freq(200000000) #set clock to 200 
MHz x = machine.freq() t = Do_Stuff() print(\"\\n@ \", x, \" time to run =\", t, \"ms\") machine.freq(250000000) #set clock to 250 MHz x = machine.freq() t = Do_Stuff() print(\"\\n@ \", x, \" time to run =\", t, \"ms\") machine.freq(125000000) #set clock to 125 MHz to clean things up print(\"\\n All Done Folks!\") sm0.active(0) sm1.active(0) sm2.active(0) sm3.active(0) sm0.exec(\"set(pins,0)\") sm1.exec(\"set(pins,0)\") sm2.exec(\"set(pins,0)\") sm3.exec(\"set(pins,0)\")","title":"Custom performance test"},{"location":"assignments/week04.html#xiao-samd21","text":"The SAMD21 runs at 48 MHz, which is considerably slower than the RP2040. But it's easy to solder. I'll probably use it because of that. It also has a DAC output. For hobbyists, the SAMD21 can only be programmed in the Arduino IDE and CircuitPython. For professionals, you can use Microchip Studio or set up your own toolchain. So I set up the board in the Arduino IDE the way you would set up any new board: I added the proper link into \"Additional Boards Manager URLs\" in Preferences, and then I could find Seeed SAMD21 Boards in the Boards Manager and install them. It's as easy as that, and then selecting the right board (Seeeduino Xiao). The setup is covered in this tutorial . I tried connecting it to the Arduino IDE but it didn't work.","title":"Xiao SAMD21"},{"location":"assignments/week04.html#xiao-esp32-c3","text":"The ESP32 C3 runs at 160 MHz, which is fast, but it's not possible to overclock it. But the most amazing thing is that this tiny microcontroller has WiFi and Bluetooth built in! This is my first RISC-V chip. As Neil says, RISC-V will probably take over the world. It's an open source architecture, so unlike the ARM chips that are found in microcontrollers, billions of smartphones and all the way up to supercomputers, manufacturers don't have to pay huge licensing fees to make RISC-V chips. 
I tried connecting it to the Arduino IDE but it didn't work.","title":"Xiao ESP32 C3"},{"location":"assignments/week04.html#esp-32-cam","text":"ESP-32 CAM is a 10 dollar WiFi camera! I had to have one and try it out. It might play well with my robot arm. I followed this tutorial to set up the ESP32-CAM board. To install the ESP32 boards in the Arduino IDE, I followed that tutorial . Then I tested the board with this tutorial . I uncommented the AI-Thinker CAM definition in the setup and input my WiFi credentials in the sketch. When the Arduino IDE had uploaded the code, the following message was left hanging: Leaving... Hard resetting via RTS pin... But then I checked and saw that above it, the status bar said \"Done uploading\". I also found this explanation , which said that this isn't an error at all. First I saw nothing in the serial monitor. Then I removed an extra #define camera line. Still nothing. Then I switched to the 5G network, the same as my laptop is on. Then I saw a series of dots form .... but no IP address. But when I did in the opposite order from the tutorial (first pressed RESET, then removed the jumper) I got this message: ets Jul 29 2019 12:21:46 rst:0x1 (POWERON_RESET),boot:0x3 (DOWNLOAD_BOOT(UART0/UART1/SDIO_REI_REO_V2)) waiting for download ets Jul 29 2019 12:21:46 Something's happening! I removed the jumper and pressed reset again: rst:0x1 (POWERON_RESET),boot:0x13 (SPI_FAST_FLASH_BOOT) configsip: 0, SPIWP:0xee clk_drv:0x00,q_drv:0x00,d_drv:0x00,cs0_drv:0x00,hd_drv:0x00,wp_drv:0x00 mode:DIO, clock div:1 load:0x3fff0030,len:1344 load:0x40078000,len:13836 load:0x40080400,len:3608 entry 0x400805f0 . WiFi connected Camera Ready! Use 'http://192.168.1.32' to connect Success! After scrolling down and pressing \"Start stream\", I could see a live stream from the camera over WiFi to my browser! At QVGA resolution (320x240), the stream is smooth. At UXGA resolution (1600x1200), the stream stutters. 
SVGA (800x600) is the highest resolution where lag isn't very noticeable. That's pretty good for a microcontroller! I love it. And I find that in low light, greyscale looks a lot better than a color image. Note This time I used ffmpeg -i esp32-cam_test1.mp4 -vf scale=400:-2 -vcodec libx264 -an -crf 20 esp32-cam_test1_web.mp4 to compress the video and remove the audio as shown here . This video shows ways to control the image capture just by typing URLs into the browser. If you have a computer nearby at all times, you can also stream video from the ESP32-CAM to any browser in the world. And here's a guide to modifying the HTML code inside the Arduino code. It's not obvious how to do it, since the HTML has been converted to hex code in the Arduino sketch. I checked whether it's possible to overclock the ESP32, but 240 MHz seems to be the maximum clock speed.","title":"ESP-32 CAM"},{"location":"assignments/week04.html#fpga","text":"I bought an FPGA development board from Seeed Studio. It is the Runber board , which contains a Gowin FPGA. I took the plunge because it is the cheapest FPGA board I've found that still has a set of tutorials . I looked at some of the documentation and it looks intimidating. I applied for a software license on a Friday and got it in the beginning of the next week. I haven't had the time to set it up and go through a tutorial, though. .md-content__button { display: none; }","title":"FPGA"},{"location":"assignments/week05.html","text":"3D Scanning and Printing Benchmarks I made a few test prints to characterize the Ultimaker 2+ and Ultimaker 3. These are our two workhorses. The classic 3D printing benchmark is the 3D Benchy by Creative Tools in Sweden. It came out well, with only a tiny bit of drooping in the top of the doorway. A nice 3D Benchy. I printed this tolerance test by 3D Maker Noob. The clearance test turned out well, with only the tightest clearance (0.1 mm) getting fused together. I would say that's very good. 
I've 3D printed professionally for years, but I'm still amazed how precise these computer-controlled hot glue guns can be. A hot glue gun is actually the origin of FDM 3D printing , by the way. 3D scanning I couldn't think of anything to 3D scan but just at the right time, an artist came into the lab with a small sculpture and asked if we could 3D scan and print it. I gave it my best shot. The sculpture to be scanned. The creature is based on a picture of the old Icelandic mythical beast Fj\u00f6rulalli, which looks mostly like a seal and lives on the beach. It's most commonly found in the West Fjords and Brei\u00f0afj\u00f6r\u00f0ur. It looked like a seal and acted like a seal. The Fj\u00f6rulalli is probably our least mysterious mythical creature. But I really like Angela Muenther's sculpture of it. I followed Christopher Barnatt's excellent tutorial in this project. We started by clearing a table and closing the blinds, because you want nice, even light for photogrammetry. We placed the little guy on some fabric with a pattern, because that helps the algorithm locate the pictures in space afterwards and stitch them together. Then we took pictures on a small point-and-shoot camera all around the sculpture in circles with increasing height. Then I loaded the images into the excellent Meshroom , which is free and open-source. I had tried Meshroom once before, but it didn't work for some reason, so I gave up. This time there was time pressure, so I became more resourceful. I found that the path to the images contained Icelandic characters. Once I had fixed that, the program started running, the computer started humming and in about an hour and a half, I had a 3D point cloud! Raw point cloud in Meshroom. Then I took the mesh into Meshmixer and deleted everything that I didn't want to include. 
The final 3D processing step was in Blender, where I filled in the bottom of the model to make the mesh watertight and then smoothed it in a few places, using my new Blender skills from having gone through the donut tutorial last week. Loop select and fill in Blender. The smoothing tool in the sculpture environment in Blender. Then I 3D printed two sculptures in almost exactly the same gray color as the original sculpture. One had smoothing, the other had none. When Angela came to examine them the next day, she said she wanted something in between, so I started again and did the smoothing with more finesse this time. Then I printed some more of them, in a few different materials. One in transparent PETG and one in glow-in-the-dark PLA. The output of this week. I used this tip to get the printing temperature right for PETG, for the transparent sculpture print. PETG print with Lighning infill in Cura. Angela really liked the glow-in-the-dark version. So do I. Angela, far left, describing her exhibition. The exhibition at Galler\u00ed \u00dathverfa . Design for 3D printing The next task was to design and 3D print something that you can't make subtractively (e.g. using milling). I've been interested in lattice structures for a while, so I took a stab at making one. The 3D sketch in Fusion 360. I sweep the circle along the line to make a cylinder. Then I repeat. I made a rectangular pattern of this lattice and 3D printed it. The red one is hard PLA and the white one is soft TPU. Download lattice test file I really liked the soft lattice, so I wanted to make a bigger version of it. I made a much bigger rectangular pattern in Fusion 360. Then I modeled a ball and used it to cut the pattern. Fortunately my laptop is quite powerful, so it went without a hitch. This is probably the biggest pattern I've made. Then I thought about how to print this thing. It needs support on the bottom side, but will regular supports generated by Cura be good enough? 
I decided to model the supports myself. To decide on the gap between the model and support structure, I used this reference . I decided to go for a 0.2 mm air gap, so I offset the circle by 0.4 mm. Support bowl for the lattice structure. Removing the supports went really smoothly! After cutting with scissors, I could tear the ball away from the bowl. \u00de\u00f3rarinn immediately suggested that I turn the ball into a lamp. I used this guide from Creality to adjust the Generic TPU profile in Cura for the CR-10 printer. The CR-series printers are OK for tinkerers but not for Fab Labs or other professional users. I would not let anyone use the CR-10 printers unsupervised. You have to level the bed almost every time you print, and often make adjustments after you start printing. Usually I need to start printing two or three times. It's not an enjoyable experience. Contrast that with the Ultimaker 2+ (I have one myself and it's my favorite 3D printer). It just prints, and you rarely have to level it. It's a real workhorse. I've printed for over 2000 hours on my machine and it's still printing like it's new. I don't like the direction Ultimaker is taking, however, with super expensive printers for professional users. I printed the ball using TPU (thermoplastic polyurethane) and it bounces and works great as a ball to play with! I used 1.75 mm TPU on the Creality CR-10, which has a Bowden extruder. What that means is that it pushes on the filament and it acts like a long, soft spring. So to get good results I had to print really slowly. The print took five days to complete. The lattice ball has become one of the favorite objects to play with in the lab. You can even bounce it off the floor without breaking it. Here are my students in the Fab Lab course at the local high school: Unfortunately the Fusion 360 file is 65 MB, so I can't include it here. But I've put the STL file on Sketchfab . .md-content__button { display: none; }","title":"5. 
3D Scanning and Printing"},{"location":"assignments/week05.html#3d-scanning-and-printing","text":"","title":"3D Scanning and Printing   "},{"location":"assignments/week05.html#benchmarks","text":"I made a few test prints to characterize the Ultimaker 2+ and Ultimaker 3. These are our two workhorses. The classic 3D printing benchmark is the 3D Benchy by Creative Tools in Sweden. It came out well, with only a tiny bit of drooping in the top of the doorway. A nice 3D Benchy. I printed this tolerance test by 3D Maker Noob. The clearance test turned out well, with only the tightest clearance (0.1 mm) getting fused together. I would say that's very good. I've 3D printed professionally for years, but I'm still amazed how precise these computer-controlled hot glue guns can be. A hot glue gun is actually the origin of FDM 3D printing , by the way.","title":"Benchmarks"},{"location":"assignments/week05.html#3d-scanning","text":"I couldn't think of anything to 3D scan but just at the right time, an artist came into the lab with a small sculpture and asked if we could 3D scan and print it. I gave it my best shot. The sculpture to be scanned. The creature is based on a picture of the old Icelandic mythical beast Fj\u00f6rulalli, which looks mostly like a seal and lives on the beach. It's most commonly found in the West Fjords and Brei\u00f0afj\u00f6r\u00f0ur. It looked like a seal and acted like a seal. The Fj\u00f6rulalli is probably our least mysterious mythical creature. But I really like Angela Muenther's sculpture of it. I followed Christopher Barnatt's excellent tutorial in this project. We started by clearing a table and closing the blinds, because you want nice, even light for photogrammetry. We placed the little guy on some fabric with a pattern, because that helps the algorithm locate the pictures in space afterwards and stitch them together. Then we took pictures on a small point-and-shoot camera all around the sculpture in circles with increasing height. 
Then I loaded the images into the excellent Meshroom , which is free and open-source. I had tried Meshroom once before, but it didn't work for some reason, so I gave up. This time there was time pressure, so I became more resourceful. I found that the path to the images contained Icelandic characters. Once I had fixed that, the program started running, the computer started humming and in about an hour and a half, I had a 3D point cloud! Raw point cloud in Meshroom. Then I took the mesh into Meshmixer and deleted everything that I didn't want to include. The final 3D processing step was in Blender, where I filled in the bottom of the model to make the mesh watertight and then smoothed it in a few places, using my new Blender skills from having gone through the donut tutorial last week. Loop select and fill in Blender. The smoothing tool in the sculpture environment in Blender. Then I 3D printed two sculptures in almost exactly the same gray color as the original sculpture. One had smoothing, the other had none. When Angela came to examine them the next day, she said she wanted something in between, so I started again and did the smoothing with more finesse this time. Then I printed some more of them, in a few different materials. One in transparent PETG and one in glow-in-the-dark PLA. The output of this week. I used this tip to get the printing temperature right for PETG, for the transparent sculpture print. PETG print with Lightning infill in Cura. Angela really liked the glow-in-the-dark version. So do I. Angela, far left, describing her exhibition. The exhibition at Galler\u00ed \u00dathverfa .","title":"3D scanning"},{"location":"assignments/week05.html#design-for-3d-printing","text":"The next task was to design and 3D print something that you can't make subtractively (e.g. using milling). I've been interested in lattice structures for a while, so I took a stab at making one. The 3D sketch in Fusion 360. I sweep the circle along the line to make a cylinder. 
Then I repeat. I made a rectangular pattern of this lattice and 3D printed it. The red one is hard PLA and the white one is soft TPU. Download lattice test file I really liked the soft lattice, so I wanted to make a bigger version of it. I made a much bigger rectangular pattern in Fusion 360. Then I modeled a ball and used it to cut the pattern. Fortunately my laptop is quite powerful, so it went without a hitch. This is probably the biggest pattern I've made. Then I thought about how to print this thing. It needs support on the bottom side, but will regular supports generated by Cura be good enough? I decided to model the supports myself. To decide on the gap between the model and support structure, I used this reference . I decided to go for a 0.2 mm air gap, so I offset the circle by 0.4 mm. Support bowl for the lattice structure. Removing the supports went really smoothly! After cutting with scissors, I could tear the ball away from the bowl. \u00de\u00f3rarinn immediately suggested that I turn the ball into a lamp. I used this guide from Creality to adjust the Generic TPU profile in Cura for the CR-10 printer. The CR-series printers are OK for tinkerers but not for Fab Labs or other professional users. I would not let anyone use the CR-10 printers unsupervised. You have to level the bed almost every time you print, and often make adjustments after you start printing. Usually I need to start printing two or three times. It's not an enjoyable experience. Contrast that with the Ultimaker 2+ (I have one myself and it's my favorite 3D printer). It just prints, and you rarely have to level it. It's a real workhorse. I've printed for over 2000 hours on my machine and it's still printing like it's new. I don't like the direction Ultimaker is taking, however, with super expensive printers for professional users. I printed the ball using TPU (thermoplastic polyurethane) and it bounces and works great as a ball to play with! 
I used 1.75 mm TPU on the Creality CR-10, which has a Bowden extruder. What that means is that it pushes on the filament and it acts like a long, soft spring. So to get good results I had to print really slowly. The print took five days to complete. The lattice ball has become one of the favorite objects to play with in the lab. You can even bounce it off the floor without breaking it. Here are my students in the Fab Lab course at the local high school: Unfortunately the Fusion 360 file is 65 MB, so I can't include it here. But I've put the STL file on Sketchfab . .md-content__button { display: none; }","title":"Design for 3D printing"},{"location":"assignments/week06.html","text":"Electronics Design Designing a simple board in KiCAD Here's a video where I describe what I did in Electronics Design week to Neil Gershenfeld during random review. A few months ago I went through a short KiCAD tutorial, to get to know the program a little bit. But I got a lot more out of designing a little PCB with an LED and a button and making it on the vinyl cutter this week. Designing something without making it doesn't get me excited to do more stuff. After making the little circuit, I really want to make more. Let's get into it. The fab library I cloned the Fab Lab parts library for KiCAD from the Gitlab repository. It's maintained by Krisjanis Rijnieks in Finland. To use the library, I needed to upgrade to KiCAD 7. I started by going into Preferences and Manage Symbol Libraries. There I clicked the + symbol to import a new library and navigated to the location of the fab library, which is called fab.kicad_sym. Then I went into Preferences and Manage Footprint Libraries and did the same, but that file is called fab.pretty. KiCAD has many parts in its libraries, but the fab library has been the most useful by far. The schematic Having imported all the components in the Fab Lab Inventory , I could get started with my design. I created a new design in a new folder. 
In the Schematic editor I pressed A on the keyboard to add components, and added a 1206 diode, a 1206 resistor to go with it, the Xiao RP2040 module and a pushbutton. I also added a power symbol and a ground symbol. Then I used the wire tool on the right side of the interface to connect the components together in a way that made sense to me. My first schematic. Routing the PCB Next, I opened the PCB Editor. I placed the components there all at once. They were connected by thin lines called a rat's nest, but the routing of the physical traces requires a second step. I moved the parts into a pleasing arrangement and then connected them together with the wire tool in the toolbar on the right. After selecting Update PCB from Schematic, I got the parts all in a bundle. Then I realized that I probably had the wrong button, so I swapped it out. Then I arranged the parts in a pleasing way and routed traces between them by pressing X on the keyboard. Arranged and routed. 3D Viewer Then I tried the 3D Viewer (View -> 3D Viewer) and got a warning that I needed to define a board outline. Board outline missing. So I added a board outline to Edge Cuts. A day or two earlier, my elder son Ernir said that he wanted to make an electric thing in the shape of a circle, and close it off on all sides except one, so that we could connect it to another circle. So I made the outline a circle. Circular board outline. I moved the components around some more until I was happy. Then I pressed Alt + 3 on the keyboard to get a 3D preview. That was underwhelming, since none of the components I used have a 3D model associated with them. I wanted at least to have the Xiao module, so that I could design an enclosure around its USB connector (if I had the time). I found the model on the SeeedStudio web site . It includes a Fusion 360 design file, but I only need the STEP file. I put the STEP file into the fab.3dshapes folder in the fab library and used this tutorial to connect it to the KiCAD footprint. 
Now we're talking! I can see the 3D model being useful when designing enclosures. A few fixes I exported the design as an SVG and opened it in Inkscape. It wasn't until I saw it there that I realized that the traces were too thin to cut on the vinyl cutter. I needed to go back into the PCB Editor and use a quick and dirty way to change the track width. I selected one part of each trace, then pressed U to select the whole trace and then pressed E to open the trace properties. There I could change the trace width from 0.25 mm to 1 mm. That's better. Next time I'll set the track width from the start in the proper way using Netclasses in File -> Board Setup. That's much more convenient, since every new trace will have the width that I've defined. This is how you set the trace width properly for the whole board. Now I have a completed design in the PCB editor, ready to export to Inkscape: Xiao RP2040 LED board with a button. Here are the KiCAD files for this project, including the PDF file that is ready for cutting: Download KiCAD project Selecting a resistor for the LED Now I needed to find the right resistor to use with the blue LED. I found a very useful tutorial from Digikey, which is where I got my parts from. Diodes are a one-way valve for electrons. Electrons can go one way, but not the other. When diodes let current through, they have almost no resistance, and that's great. Then someone discovered that diodes can emit light. Now LEDs are everywhere and they have one caveat: You can't just connect one to a power source and have it work. Because it's such a good conductor, it will let more current through than it can handle. It will burn up, or even blow up! So we need a helper component. Every LED needs a current limiting resistor in order to survive. So, how do we select the right resistor for our blue LED? Let's use Ohm's Law: Ohm's Law pyramid. Coincidentally, VIR means 'wire' in Icelandic. 
It's fun to be able to make graphics like these in Inkscape and render them easily on a web page. Ok, I need to know the Source voltage: 3.3 V LED forward voltage: 3.2 V (max 3.5 V) LED's recommended operating current: 30 mA But there's a catch: After reading the datasheet, I know that RP2040 can only supply 16 mA from each GPIO pin. So that's the number I'll use. The voltage drop across the resistor needs to be \\[V_{source}-V_{forward}\\] \\[3.3 V - 3.2 V = 0.1 V\\] Now let's use the pyramid to get an expression for the resistor: \\[R = \\frac{V}{I} = \\frac{0.1V}{0.016A} = 6.25 \u03a9\\] I have the resistance! But there's one more step: I need to make sure that the resistor can take the heat. I'll use the power formula to see the power dissipated by the resistor: \\[P = I\u22c5V\\] \\[ = 0.016 A \u22c5 0.1 V \\] \\[ = 0.0016 W \\] \\[ = 1.6 mW \\] That's insignificant. At the Fab Lab I have 4.99\u03a9 and 10\u03a9 resistors. Let's check the current through the LED: 4.99\u03a9: \\[I = \\frac{V}{R} = \\frac{0.1V}{4.99\u03a9} = 0.02A \\] 10\u03a9: \\[I = \\frac{V}{R} = \\frac{0.1V}{10\u03a9} = 0.01A\\] OK, I'll use the 10\u03a9 resistor. Using an oscilloscope I programmed a Raspberry Pi Pico to output stepper motor control signals. This is the same MicroPython code as I used in Embedded Programming week. It's a very simple way of creating signals for a 28BYJ-48 stepper motor . from machine import Pin from time import sleep IN1 = Pin ( 2 , Pin . OUT ) IN2 = Pin ( 3 , Pin . OUT ) IN3 = Pin ( 4 , Pin . OUT ) IN4 = Pin ( 5 , Pin . OUT ) pins = [ IN1 , IN2 , IN3 , IN4 ] sequence = [[ 1 , 0 , 0 , 0 ],[ 0 , 1 , 0 , 0 ],[ 0 , 0 , 1 , 0 ],[ 0 , 0 , 0 , 1 ]] while True : for step in sequence : for i in range ( len ( pins )): pins [ i ] . value ( step [ i ]) sleep ( 1 ) I set the probe to 10X sensitivity: And here's what I got on the oscilloscope: A clear step signal, at just over 60Hz. Time is on the X-axis and voltage is on the Y-axis. 
Then when the oscilloscope starts receiving a signal I press Auto adjust, and the scope detects how the signal looks and adjusts the time scale so the signal is steady on the screen. I used the portable digital oscilloscope to troubleshoot my Output Devices board: I found erratic signals coming from one of the H-bridges, which caused my brushless motor to move erratically. More info is here . Using a multimeter In Computer-Controlled Cutting week, I measured a resistor with a multimeter: Here the circuit needs to be powered off. The resistor says 1002, and it indeed measures as 10kOhm. In Output Devices week, I measured the voltage that an OLED gets from the power supply: Here the circuit needs to be powered on. The OLED gets roughly 5V, as it should be. The OLED can operate on 3.3V-5V voltage. I also measured the current that the OLED draws: Here I need to break the circuit and put the current meter into the circuit, so that the current flows through it. Inside the current meter is a resistor with a very small, known resistance. The voltage drop over the resistor is measured and from that, the device calculates the current. On the right a bigger part of the screen is turned on, and it shows in the current measurement. .md-content__button { display: none; }","title":"6. Electronics Design"},{"location":"assignments/week06.html#electronics-design","text":"","title":"Electronics Design"},{"location":"assignments/week06.html#designing-a-simple-board-in-kicad","text":"Here's a video where I describe what I did in Electronics Design week to Neil Gershenfeld during random review. A few months ago I went through a short KiCAD tutorial, to get to know the program a little bit. But I got a lot more out of designing a little PCB with an LED and a button and making it on the vinyl cutter this week. Designing something without making it doesn't get me excited to do more stuff. After making the little circuit, I really want to make more. 
Let's get into it.","title":"Designing a simple board in KiCAD   "},{"location":"assignments/week06.html#the-fab-library","text":"I cloned the Fab Lab parts library for KiCAD from the Gitlab repository. It's maintained by Krisjanis Rijnieks in Finland. To use the library, I needed to upgrade to KiCAD 7. I started by going into Preferences and Manage Symbol Libraries. There I clicked the + symbol to import a new library and navigated to the location of the fab library, which is called fab.kicad_sym. Then I went into Preferences and Manage Footprint Libraries and did the same, but that file is called fab.pretty. KiCAD has many parts in its libraries, but the fab library has been the most useful by far.","title":"The fab library"},{"location":"assignments/week06.html#the-schematic","text":"Having imported all the components in the Fab Lab Inventory , I could get started with my design. I created a new design in a new folder. In the Schematic editor I pressed A on the keyboard to add components, and added a 1206 diode, a 1206 resistor to go with it, the Xiao RP2040 module and a pushbutton. I also added a power symbol and a ground symbol. Then I used the wire tool on the right side of the interface to connect the components together in a way that made sense to me. My first schematic.","title":"The schematic"},{"location":"assignments/week06.html#routing-the-pcb","text":"Next, I opened the PCB Editor. I placed the components there all at once. They were connected by thin lines called a rat's nest, but the routing of the physical traces requires a second step. I moved the parts into a pleasing arrangement and then connected them together with the wire tool in the toolbar on the right. After selecting Update PCB from Schematic, I got the parts all in a bundle. Then I realized that I probably had the wrong button, so I swapped it out. Then I arranged the parts in a pleasing way and routed traces between them by pressing X on the keyboard. 
Arranged and routed.","title":"Routing the PCB"},{"location":"assignments/week06.html#3d-viewer","text":"Then I tried the 3D Viewer (View -> 3D Viewer) and got a warning that I needed to define a board outline. Board outline missing. So I added a board outline to Edge Cuts. A day or two earlier, my elder son Ernir said that he wanted to make an electric thing in the shape of a circle, and close it off on all sides except one, so that we could connect it to another circle. So I made the outline a circle. Circular board outline. I moved the components around some more until I was happy. Then I pressed Alt + 3 on the keyboard to get a 3D preview. That was underwhelming, since none of the components I used have a 3D model associated with them. I wanted at least to have the Xiao module, so that I could design an enclosure around its USB connector (if I had the time). I found the model on the SeeedStudio web site . It includes a Fusion 360 design file, but I only need the STEP file. I put the STEP file into the fab.3dshapes folder in the fab library and used this tutorial to connect it to the KiCAD footprint. Now we're talking! I can see the 3D model being useful when designing enclosures.","title":"3D Viewer"},{"location":"assignments/week06.html#a-few-fixes","text":"I exported the design as an SVG and opened it in Inkscape. It wasn't until I saw it there that I realized that the traces were too thin to cut on the vinyl cutter. I needed to go back into the PCB Editor and use a quick and dirty way to change the track width. I selected one part of each trace, then pressed U to select the whole trace and then pressed E to open the trace properties. There I could change the trace width from 0.25 mm to 1 mm. That's better. Next time I'll set the track width from the start in the proper way using Netclasses in File -> Board Setup. That's much more convenient, since every new trace will have the width that I've defined. 
This is how you set the trace width properly for the whole board. Now I have a completed design in the PCB editor, ready to export to Inkscape: Xiao RP2040 LED board with a button. Here are the KiCAD files for this project, including the PDF file that is ready for cutting: Download KiCAD project","title":"A few fixes"},{"location":"assignments/week06.html#selecting-a-resistor-for-the-led","text":"Now I needed to find the right resistor to use with the blue LED. I found a very useful tutorial from Digikey, which is where I got my parts from. Diodes are a one-way valve for electrons. Electrons can go one way, but not the other. When diodes let current through, they have almost no resistance, and that's great. Then someone discovered that diodes can emit light. Now LEDs are everywhere and they have one caveat: You can't just connect one to a power source and have it work. Because it's such a good conductor, it will let more current through than it can handle. It will burn up, or even blow up! So we need a helper component. Every LED needs a current limiting resistor in order to survive. So, how do we select the right resistor for our blue LED? Let's use Ohm's Law: Ohm's Law pyramid. Coincidentally, VIR means 'wire' in Icelandic. It's fun to be able to make graphics like these in Inkscape and render them easily on a web page. Ok, I need to know the Source voltage: 3.3 V LED forward voltage: 3.2 V (max 3.5 V) LED's recommended operating current: 30 mA But there's a catch: After reading the datasheet, I know that RP2040 can only supply 16 mA from each GPIO pin. So that's the number I'll use. The voltage drop across the resistor needs to be \\[V_{source}-V_{forward}\\] \\[3.3 V - 3.2 V = 0.1 V\\] Now let's use the pyramid to get an expression for the resistor: \\[R = \\frac{V}{I} = \\frac{0.1V}{0.016A} = 6.25 \u03a9\\] I have the resistance! But there's one more step: I need to make sure that the resistor can take the heat. 
I'll use the power formula to see the power dissipated by the resistor: \\[P = I\u22c5V\\] \\[ = 0.016 A \u22c5 0.1 V \\] \\[ = 0.0016 W \\] \\[ = 1.6 mW \\] That's insignificant. At the Fab Lab I have 4.99\u03a9 and 10\u03a9 resistors. Let's check the current through the LED: 4.99\u03a9: \\[I = \\frac{V}{R} = \\frac{0.1V}{4.99\u03a9} = 0.02A \\] 10\u03a9: \\[I = \\frac{V}{R} = \\frac{0.1V}{10\u03a9} = 0.01A\\] OK, I'll use the 10\u03a9 resistor.","title":"Selecting a resistor for the  LED"},{"location":"assignments/week06.html#using-an-oscilloscope","text":"I programmed a Raspberry Pi Pico to output stepper motor control signals. This is the same MicroPython code as I used in Embedded Programming week. It's a very simple way of creating signals for a 28BYJ-48 stepper motor . from machine import Pin from time import sleep IN1 = Pin ( 2 , Pin . OUT ) IN2 = Pin ( 3 , Pin . OUT ) IN3 = Pin ( 4 , Pin . OUT ) IN4 = Pin ( 5 , Pin . OUT ) pins = [ IN1 , IN2 , IN3 , IN4 ] sequence = [[ 1 , 0 , 0 , 0 ],[ 0 , 1 , 0 , 0 ],[ 0 , 0 , 1 , 0 ],[ 0 , 0 , 0 , 1 ]] while True : for step in sequence : for i in range ( len ( pins )): pins [ i ] . value ( step [ i ]) sleep ( 1 ) I set the probe to 10X sensitivity: And here's what I got on the oscilloscope: A clear step signal, at just over 60Hz. Time is on the X-axis and voltage is on the Y-axis. Then when the oscilloscope starts receiving a signal I press Auto adjust, and the scope detects how the signal looks and adjusts the time scale so the signal is steady on the screen. I used the portable digital oscilloscope to troubleshoot my Output Devices board: I found erratic signals coming from one of the H-bridges, which caused my brushless motor to move erratically. More info is here .","title":"Using an oscilloscope"},{"location":"assignments/week06.html#using-a-multimeter","text":"In Computer-Controlled Cutting week, I measured a resistor with a multimeter: Here the circuit needs to be powered off. 
The resistor says 1002, and it indeed measures as 10kOhm. In Output Devices week, I measured the voltage that an OLED gets from the power supply: Here the circuit needs to be powered on. The OLED gets roughly 5V, as it should be. The OLED can operate on 3.3V-5V voltage. I also measured the current that the OLED draws: Here I need to break the circuit and put the current meter into the circuit, so that the current flows through it. Inside the current meter is a resistor with a very small, known resistance. The voltage drop over the resistor is measured and from that, the device calculates the current. On the right a bigger part of the screen is turned on, and it shows in the current measurement. .md-content__button { display: none; }","title":"Using a multimeter"},{"location":"assignments/week07.html","text":"Computer-Controlled Machining Design I designed an extended table with a shelf for our coffee corner. I wondered how I could make the shelf supports look good: Version 1 Version 2 Version 3 I ended up picking version 3. Here's an example of how convenient parametric design can be: And here's the whole design process. First I create the parts and adjust their sizes, then I model the press-fit dogbone joints and finally I move the parts into one plane and nest them manually to fit onto the plywood plate that I have. I need to create the plywood plate as a part in the model, so that I can use it as the stock when setting up the machining toolpaths. Computer-Aided Machining I start by creating a Setup. Under the Stock tab, I select the body that represents my plywood sheet: It's good practice to name the bodies in the model. It makes things easier. Then in the Setup tab, I set up the work coordinate system. I set the zero point to be the bottom left corner of the sheet corner, looking at it from above (see the image above). The top surface is the Z reference. Close-up of how I define the work origin for the Shopbot. Now I start creating machining operations. 
I had to create the chamfering tool in Fusion, it's not complicated. Cutter tab Cutting data tab You can select the type of milling bit from a drop down list and then specify its dimensions according to the bit that you have. In the Cutting data tab you also input the feeds and speeds for this bit. \u00de\u00f3rarinn recommended that for plywood I would choose a spindle speed of 14.000 rpm and a feed rate of 1000 mm/min. Then I chose a 2D contour milling operation and selected only the edges that I wanted chamfered. For some reason it worked better to split the operations up and select only one edge in each one. Under the Passes tab, uncheck the Stock to leave option. The first pass is the finishing pass. Geometry tab Heights tab Remember to select Stock bottom as the bottom plane. Next, I mill the dogbone press-fit holes. I select a 6 mm flat end mill with the same feeds and speeds as the chamfering tool. I created one 2D pocket operation for each dogbone pocket. The simulation looks good. I had to enlarge the circles in the dogbone to make sure that the milling bit can enter them. The last operation is to mill all the outline, using the same 6 mm flat endmill, again rotating at 14.000 rpm and moving at 1000 mm/min through the material: The outline milling operation. It's a 2D contour operation, like the chamfering. I had the machine leave rectangular shaped tabs, to keep the parts from moving around while finishing the milling operation. The ShopBot Turning the machine on To be able to turn on the spindle, you need to reset the machine controller. Before you do anything in the Shopbot control software, you need to open Tools -> Spindle control. Otherwise the software can't change the spindle speed as the GCode requires. Then you go into Cuts -> Spindle Warmup Routine, to spin the spindle at two speeds for 10 or 15 minutes, to warm up the bearings in the spindle. You go into Cuts -> Zero Z-axis w/ Zzero plate to zero the Z-axis with a conductive metal plate. 
Then you press K to get the KeyPad window and use the arrow keys on the keyboard to move the spindle to a convenient point before you go into Zero -> Zero [2] axes (X & Y) in the software. Turning on the shop vac just before starting the job. Recalibrating the ShopBot When I made a fit test with two slightly different clearances, I discovered that both of them were way off the mark. I asked my instructor \u00de\u00f3rarinn what might cause this and he suggested that the gearboxes on the stepper motors might be wearing out. This may result in the steps per inch value changing. So I modeled a 100 mm by 100 mm square with a circular pocket in the middle. My instructor \u00de\u00f3rarinn suggested that I also chamfer the corners, so that I could make diagonal measurements. The resulting square had 100.5 mm sides. That means that I need to use this scaling factor on the steps per inch value. I can change that value in the Unit Values window in the Shopbot control software. \\[ \\frac{100}{100.5} = 0.9950249 \\] Corrected value: \\[ 0.9950 * 97.7487 = \\underline{97.262389} \\; \\textnormal{steps/inch} \\] steps/inch. Let's change the unit values for the X and Y axes in the ShopBot control software: The X and Y unit values were 97.7487 and then I changed them to 97.2624. Now our ShopBot is more accurate. Test pieces I designed a test fit piece with different clearances in Fusion 360. These fit tests gave me the confidence to finish the design of the coffee corner and mill all the parts in one go. Hexagon fit test pieces. I also did a chamfering test, here's the CAM simulation: I'm glad I also did this test, because I had selected a chamfering tool that was too small. Both are 90\u00b0 bits, but one is bigger than the other. And that makes a difference when you carve this deep. Milling the parts for the coffee corner It was exciting to do sophisticated milling like this with confidence. 
I fixed the plywood sheet with screws in the corners and on the middle of the long sides. I took care to tie my hair into a bun and wear hearing protection and safety glasses. First I milled just the chamfers into the plywood sheet. Then I changed from the chamfer tool to the 6 mm flat end mill and milled the dogbone pockets. After that operation it still didn't look like anything recognizable. Finally I milled the outlines and then I recognized the parts. Top view after milling. Dislodging the sawdust with a screwdriver. Vacuuming the sawdust. Breaking the tab. Breaking the tabs with a chisel. Sanding the edges. I chamfered a few corners before hammering the parts together, to make sure that they sit flush against each other. My instructor \u00de\u00f3rarinn suggested that I take a little off the corners to compensate for the inner corner radius in the piece that gets hammered into these holes. Hammering the parts together. They fit tightly together and don't require any glue. Our new documentation station. I really get in the zone when I sit here and write documentation. I think it's because I feel like I'm in a caf\u00e9, and that's where I get the best concentration. Look how serious I am! The next time I mill something out of wood using Fusion 360, I may need to use a different tab shape. Look at the burn marks after the machine stopped to mill this tab: There are burn marks where all the tabs were. While I was hard at work finishing up the documentation, my instructor \u00de\u00f3rarinn painted the table black and attached it and the shelf to the wall. Bas Withagen in Fab Lab Reykjav\u00edk used to say that if you have time to paint your final project, you're doing it wrong. Fortunately, I have my trusty instructor \u00de\u00f3rarinn, who painted the table black. He's there with his dog S\u00f3la, who was old and had become blind from diabetes. She laid at my desk as I worked on the Fab Academy documentation. 
S\u00f3la only lived for a week after I took this picture. Such a sweet dog. Our lovely coffee corner with a freshly painted table and \u00de\u00f3rarinn's espresso machine. \u00de\u00f3rarinn commented that now there was really no spot in the lab that wasn't cozy and enticing to sit down and work on a laptop. Download Coffee Corner Fusion 360 model Download 12 mm fit test Fusion 360 model Download 12 mm fit test - more clearance Fusion 360 model Download 12 mm hexagon test Fusion 360 model Download 15 mm fit test Fusion 360 model Download 15 mm chamfer test Fusion 360 model Download rectangle circle test VCarve file Download rectangle circle test - quarter inch bit VCarve file Download rectangle circle test - 22mm MDF VCarve file .md-content__button { display: none; }","title":"7. Computer-Controlled Machining"},{"location":"assignments/week07.html#computer-controlled-machining","text":"","title":"Computer-Controlled Machining   "},{"location":"assignments/week07.html#design","text":"I designed an extended table with a shelf for our coffee corner. I wondered how I could make the shelf supports look good: Version 1 Version 2 Version 3 I ended up picking version 3. Here's an example of how convenient parametric design can be: And here's the whole design process. First I create the parts and adjust their sizes, then I model the press-fit dogbone joints and finally I move the parts into one plane and nest them manually to fit onto the plywood plate that I have. I need to create the plywood plate as a part in the model, so that I can use it as the stock when setting up the machining toolpaths.","title":"Design"},{"location":"assignments/week07.html#computer-aided-machining","text":"I start by creating a Setup. Under the Stock tab, I select the body that represents my plywood sheet: It's good practice to name the bodies in the model. It makes things easier. Then in the Setup tab, I set up the work coordinate system. 
I set the zero point to be the bottom left corner of the sheet corner, looking at it from above (see the image above). The top surface is the Z reference. Close-up of how I define the work origin for the Shopbot. Now I start creating machining operations. I had to create the chamfering tool in Fusion, it's not complicated. Cutter tab Cutting data tab You can select the type of milling bit from a drop down list and then specify its dimensions according to the bit that you have. In the Cutting data tab you also input the feeds and speeds for this bit. \u00de\u00f3rarinn recommended that for plywood I would choose a spindle speed of 14.000 rpm and a feed rate of 1000 mm/min. Then I chose a 2D contour milling operation and selected only the edges that I wanted chamfered. For some reason it worked better to split the operations up and select only one edge in each one. Under the Passes tab, uncheck the Stock to leave option. The first pass is the finishing pass. Geometry tab Heights tab Remember to select Stock bottom as the bottom plane. Next, I mill the dogbone press-fit holes. I select a 6 mm flat end mill with the same feeds and speeds as the chamfering tool. I created one 2D pocket operation for each dogbone pocket. The simulation looks good. I had to enlarge the circles in the dogbone to make sure that the milling bit can enter them. The last operation is to mill all the outline, using the same 6 mm flat endmill, again rotating at 14.000 rpm and moving at 1000 mm/min through the material: The outline milling operation. It's a 2D contour operation, like the chamfering. I had the machine leave rectangular shaped tabs, to keep the parts from moving around while finishing the milling operation.","title":"Computer-Aided Machining"},{"location":"assignments/week07.html#the-shopbot","text":"","title":"The ShopBot"},{"location":"assignments/week07.html#turning-the-machine-on","text":"To be able to turn on the spindle, you need to reset the machine controller. 
Before you do anything in the Shopbot control software, you need to open Tools -> Spindle control. Otherwise the software can't change the spindle speed as the GCode requires. Then you go into Cuts -> Spindle Warmup Routine, to spin the spindle at two speeds for 10 or 15 minutes, to warm up the bearings in the spindle. You go into Cuts -> Zero Z-axis w/ Zzero plate to zero the Z-axis with a conductive metal plate. Then you press K to get the KeyPad window and use the arrow keys on the keyboard to move the spindle to a convenient point before you go into Zero -> Zero [2] axes (X & Y) in the software. Turning on the shop vac just before starting the job.","title":"Turning the machine on"},{"location":"assignments/week07.html#recalibrating-the-shopbot","text":"When I made a fit test with two slightly different clearances, I discovered that both of them were way off the mark. I asked my instructor \u00de\u00f3rarinn what might cause this and he suggested that the gearboxes on the stepper motors might be wearing out. This may result in the steps per inch value changing. So I modeled a 100 mm by 100 mm square with a circular pocket in the middle. My instructor \u00de\u00f3rarinn suggested that I also chamfer the corners, so that I could make diagonal measurements. The resulting square had 100.5 mm sides. That means that I need to use this scaling factor on the steps per inch value. I can change that value in the Unit Values window in the Shopbot control software. \\[ \\frac{100}{100.5} = 0.9950249 \\] Corrected value: \\[ 0.9950 * 97.7487 = \\underline{97.262389} \\; \\textnormal{steps/inch} \\] steps/inch. Let's change the unit values for the X and Y axes in the ShopBot control software: The X and Y unit values were 97.7487 and then I changed them to 97.2624. Now our ShopBot is more accurate.","title":"Recalibrating the ShopBot"},{"location":"assignments/week07.html#test-pieces","text":"I designed a test fit piece with different clearances in Fusion 360. 
These fit tests gave me the confidence to finish the design of the coffee corner and mill all the parts in one go. Hexagon fit test pieces. I also did a chamfering test, here's the CAM simulation: I'm glad I also did this test, because I had selected a chamfering tool that was too small. Both are 90\u00b0 bits, but one is bigger than the other. And that makes a difference when you carve this deep.","title":"Test pieces"},{"location":"assignments/week07.html#milling-the-parts-for-the-coffee-corner","text":"It was exciting to do sophisticated milling like this with confidence. I fixed the plywood sheet with screws in the corners and on the middle of the long sides. I took care to tie my hair into a bun and wear hearing protection and safety glasses. First I milled just the chamfers into the plywood sheet. Then I changed from the chamfer tool to the 6 mm flat end mill and milled the dogbone pockets. After that operation it still didn't look like anything recognizable. Finally I milled the outlines and then I recognized the parts. Top view after milling. Dislodging the sawdust with a screwdriver. Vacuuming the sawdust. Breaking the tab. Breaking the tabs with a chisel. Sanding the edges. I chamfered a few corners before hammering the parts together, to make sure that they sit flush against each other. My instructor \u00de\u00f3rarinn suggested that I take a little off the corners to compensate for the inner corner radius in the piece that gets hammered into these holes. Hammering the parts together. They fit tightly together and don't require any glue. Our new documentation station. I really get in the zone when I sit here and write documentation. I think it's because I feel like I'm in a caf\u00e9, and that's where I get the best concentration. Look how serious I am! The next time I mill something out of wood using Fusion 360, I may need to use a different tab shape. 
Look at the burn marks after the machine stopped to mill this tab: There are burn marks where all the tabs were. While I was hard at work finishing up the documentation, my instructor \u00de\u00f3rarinn painted the table black and attached it and the shelf to the wall. Bas Withagen in Fab Lab Reykjav\u00edk used to say that if you have time to paint your final project, you're doing it wrong. Fortunately, I have my trusty instructor \u00de\u00f3rarinn, who painted the table black. He's there with his dog S\u00f3la, who was old and had become blind from diabetes. She laid at my desk as I worked on the Fab Academy documentation. S\u00f3la only lived for a week after I took this picture. Such a sweet dog. Our lovely coffee corner with a freshly painted table and \u00de\u00f3rarinn's espresso machine. \u00de\u00f3rarinn commented that now there was really no spot in the lab that wasn't cozy and enticing to sit down and work on a laptop. Download Coffee Corner Fusion 360 model Download 12 mm fit test Fusion 360 model Download 12 mm fit test - more clearance Fusion 360 model Download 12 mm hexagon test Fusion 360 model Download 15 mm fit test Fusion 360 model Download 15 mm chamfer test Fusion 360 model Download rectangle circle test VCarve file Download rectangle circle test - quarter inch bit VCarve file Download rectangle circle test - 22mm MDF VCarve file .md-content__button { display: none; }","title":"Milling the parts for the coffee corner"},{"location":"assignments/week08.html","text":"Electronics Production Vinyl cutting electronics Making a vinyl cutting file in Inkscape I had to go into Inkscape and do a bit of editing of the SVG that I exported out of KiCAD in Electronics Design week . The traces were only single lines, so I couldn't just set the line width to 0.02 mm and start cutting. I used a little trick; I converted the stroke to a path. These lines won't work. Stroke to Path. Now I have lots of lines. Too many, even. But I can work with these. 
Now I had a lot of intersecting shapes, which I was able to combine using Path -> Union. After that, it was only a bit of cleaning up, since there was an extra rectangle left over on each pad. Path -> Union. Removing duplicate lines. I made the lines red, set the line width to 0.02 mm and exported to PDF (File -> Save As and then select PDF as the output). Cutting the circuit Cutting the circuit didn't go perfectly. Some of the pads came loose when the cutter was cutting holes in them. Checking the design in Inkscape, I found that there was an extra circle on top of every hole. So each hole was cut twice. I also realized that I didn't need to solder pins onto the Xiao module, I could surface-mount it! So I deleted all the holes and cut again. This time the circuit came out perfect. I love making circuits on the vinyl cutter! It's so quick and easy. I found a MicroPython blink program to run on the RP2040. All I had to do was to look at the pinout on the Xiao module to see which GPIO pin I had connected to the LED. That's pin 26. I substituted that into the program and pressed play. It works! Here's the blink program in MicroPython: from machine import Pin , Timer led = machine . Pin ( 26 , machine . Pin . OUT ) timer = Timer () def blink ( timer ): led . toggle () timer . init ( freq = 2.5 , mode = Timer . PERIODIC , callback = blink ) I also tried programming it in C++ in the Arduino IDE. Here is my board with the button working: And the Arduino code: #define BUTTON_PIN 29 #define LED_PIN 26 bool status = 0 ; void setup () { Serial . begin ( 9600 ); pinMode ( BUTTON_PIN , INPUT_PULLUP ); pinMode ( LED_PIN , OUTPUT ); } void loop () { status = digitalRead ( BUTTON_PIN ); Serial . println ( status ); if ( status == 1 ) { digitalWrite ( LED_PIN , LOW ); } else { digitalWrite ( LED_PIN , HIGH ); } delay ( 100 ); } PCB milling I decided to make an LED debugging board. I've wanted something like this for a while. 
Sometimes you just want to see if something is happening on the output pin that you've defined. Or you're not sure which pin is which. I think an LED board can help when you're figuring out if the problem is in the circuit or in the code. So I designed a board with 30 LEDs, which covers the whole length of my breadboard: Here I'm placing a simple pattern of LEDs, each with a current limiting resistor. Most of the work took place in the PCB layout environment. I needed to rotate every LED individually and align it and its resistor with the others. I had to do a fair bit of rotating and arranging to get all the resistors and LEDs in line. Then I selected File -> Export SVG and opened the SVG file in Inkscape. Unlike the vinyl cutting file, which needs to be a perfect SVG, what I'm using now is a PNG. So I only need to set the colors of the board and traces right, and that's it! Export to PNG with 1000 dots per inch resolution. The production files are simple PNG images. You can save these and load them into Fab Modules to make your own LED test board. The left one (traces) is milled with a 1/64\" bit and the right one (interior) is milled with a 1/32\" bit to cut the outline of the board. To mill the traces I took the milling bit out of the collet and put the 1/64 inch bit in. Initially the bit should only be poking a little bit out of the collet. When over the PCB and the Z-axis is in zero position, I loosened the milling bit, taking care to keep it from dropping and breaking. Then I lower the bit onto the PCB, thereby zeroing it. I push it gently down with one finger while I tighten the set screw, otherwise the screw can lift the bit slightly as I fasten it. The traces PNG is loaded in Fab Modules, RML code for Roland Modela mills is selected and PCB traces is selected. We have Fab Modules running locally on an ancient Linux machine and I don't know how to take screenshots on it. Sorry. 
Then I select the MDX-20 milling machine, click Calculate to generate the toolpath and click Send to send the RML code to the machine. Starting to mill the traces on the Roland MDX-20 machine. Vacuuming the dust away after milling. Fab Modules selections for milling the PCB traces. Generating the toolpath to mill the PCB outline. The interior PNG was loaded this time and PCB outline selected instead of PCB traces. Did you know you can rotate the view in 3D in Fab Modules? It's great! Milling the board outline with a 1/32 inch bit. I changed the bit with the same procedure as before. After another round of vacuuming, the board is ready for soldering! Soldering, then stopping to test the resistor sizing. First I tried a 1kOhm resistor, which made the LED too dim. Then I tried a 10Ohm resistor, which was just right. Then I soldered the rest of the resistors and LEDs onto the board. I enjoyed the process, but if I were to make many of these boards, I would start thinking about a Pick-and-Place machine. The LumenPNP seems nice. LED debugging I used the LED to troubleshoot my final project. First I made a simple program that blinks all the pins. It helped me to determine which pin on the IC relates to which pin number in the Arduino IDE, using the Arduino Nano 33 IoT board definition (because that's the only Arduino core that worked with my libraries). Blink all the pins! I noticed gaps, where a few pins weren't broken out on the board. I could identify them by slowing the blinking down and having the SAMD21 report the pin number to the serial monitor. Then, when the brushless motor was moving erratically, I added the LED board to the breadboard again to see if I was getting PWM signals on all three motor phases: I got a steady ENABLE signal and two PWM signals. There should also be a PWM signal on the brown wire. I had accidentally used a pin that wasn't broken out on the board for the third PWM signal. 
I quickly changed the pin in the code and the motor spun smoothly. See more in Final Project: Electronics Design . PCB milling test 1/64th inch bit I made Neil Gershenfeld's PCB milling test using the 1/64th inch flat end mill. I used a local instance of the old Fab Modules running on a Debian Linux laptop. I used the standard speed of 4 mm/s and a cut depth of 0.1 mm. Under \"number of offsets\" I put -1, which means that I want to clear the board completely of copper around the traces. The toolpaths in Fab Modules. I wonder how Neil programmed this from scratch. I think these traces came out rather well The depth of cut seems to be good since the copper is completely removed. The edges are relatively smooth, so the milling bit must be in good condition. The very finest traces came loose from the board. That's good to know. It seems to be best not to make traces thinner than 0.3 mm. 0.01 inch bit Then I tried the super thin 0.01 inch flat end mill, and I must admit that I forgot to change the milling speed. So the first attempt was at a fast pace of 4 mm/s. The end mill broke immediately. Then I tried again at a slow speed of 0.5 mm/s and the same cut depth 0.1 mm. It also broke quite quickly. This was frustrating. There are more offsets, since the milling bit is thinner. I broke two 0.01 inch milling bits trying to mill this test file. Eventual success with fine traces I waited until the final project to try the 0.01 inch end mill again, then at the very slow speed of 0.1 mm/s. It worked for an hour and then broke in the middle of the night. I documented my frustration in my final project video and in my final project presentation, Neil Gershenfeld mentioned that everything has to be perfect for this milling bit to work. You have to plane the wasteboard, clean the machine, everything has to be just right. And I think I also made the mistake of having it mill all the traces, instead of just around the ICs with the smallest pads. 
In the end I was able to mill the finest traces on my final project board with a V-bit. Then I cleared the whole board with a 1/64th inch flat end mill and milled the holes and outline with a 1/32 inch flat end mill. Here is the assembled robot joint running a PID control loop: Look at that! It works! .md-content__button { display: none; }","title":"8. Electronics Production"},{"location":"assignments/week08.html#electronics-production","text":"","title":"Electronics Production   "},{"location":"assignments/week08.html#vinyl-cutting-electronics","text":"","title":"Vinyl cutting electronics"},{"location":"assignments/week08.html#making-a-vinyl-cutting-file-in-inkscape","text":"I had to go into Inkscape and do a bit of editing of the SVG that I exported out of KiCAD in Electronics Design week . The traces were only single lines, so I couldn't just set the line width to 0.02 mm and start cutting. I used a little trick; I converted the stroke to a path. These lines won't work. Stroke to Path. Now I have lots of lines. Too many, even. But I can work with these. Now I had a lot of intersecting shapes, which I was able to combine using Path -> Union. After that, it was only a bit of cleaning up, since there was an extra rectangle left over on each pad. Path -> Union. Removing duplicate lines. I made the lines red, set the line width to 0.02 mm and exported to PDF (File -> Save As and then select PDF as the output).","title":"Making a vinyl cutting file in Inkscape"},{"location":"assignments/week08.html#cutting-the-circuit","text":"Cutting the circuit didn't go perfectly. Some of the pads came loose when the cutter was cutting holes in them. Checking the design in Inkscape, I found that there was an extra circle on top of every hole. So each hole was cut twice. I also realized that I didn't need to solder pins onto the Xiao module, I could surface-mount it! So I deleted all the holes and cut again. This time the circuit came out perfect. 
I love making circuits on the vinyl cutter! It's so quick and easy. I found a MicroPython blink program to run on the RP2040. All I had to do was to look at the pinout on the Xiao module to see which GPIO pin I had connected to the LED. That's pin 26. I substituted that into the program and pressed play. It works! Here's the blink program in MicroPython: from machine import Pin , Timer led = machine . Pin ( 26 , machine . Pin . OUT ) timer = Timer () def blink ( timer ): led . toggle () timer . init ( freq = 2.5 , mode = Timer . PERIODIC , callback = blink ) I also tried programming it in C++ in the Arduino IDE. Here is my board with the button working: And the Arduino code: #define BUTTON_PIN 29 #define LED_PIN 26 bool status = 0 ; void setup () { Serial . begin ( 9600 ); pinMode ( BUTTON_PIN , INPUT_PULLUP ); pinMode ( LED_PIN , OUTPUT ); } void loop () { status = digitalRead ( BUTTON_PIN ); Serial . println ( status ); if ( status == 1 ) { digitalWrite ( LED_PIN , LOW ); } else { digitalWrite ( LED_PIN , HIGH ); } delay ( 100 ); }","title":"Cutting the circuit"},{"location":"assignments/week08.html#pcb-milling","text":"I decided to make an LED debugging board. I've wanted something like this for a while. Sometimes you just want to see if something is happening on the output pin that you've defined. Or you're not sure which pin is which. I think an LED board can help when you're figuring out if the problem is in the circuit or in the code. So I designed a board with 30 LEDs, which covers the whole length of my breadboard: Here I'm placing a simple pattern of LEDs, each with a current limiting resistor. Most of the work took place in the PCB layout environment. I needed to rotate every LED individually and align it and its resistor with the others. I had to do a fair bit of rotating and arranging to get all the resistors and LEDs in line. Then I selected File -> Export SVG and opened the SVG file in Inkscape. 
Unlike the vinyl cutting file, which needs to be a perfect SVG, what I'm using now is a PNG. So I only need to set the colors of the board and traces right, and that's it! Export to PNG with 1000 dots per inch resolution. The production files are simple PNG images. You can save these and load them into Fab Modules to make your own LED test board. The left one (traces) is milled with a 1/64\" bit and the right one (interior) is milled with a 1/32\" bit to cut the outline of the board. To mill the traces I took the milling bit out of the collet and put the 1/64 inch bit in. Initially the bit should only be poking a little bit out of the collet. When over the PCB and the Z-axis is in zero position, I loosened the milling bit, taking care to keep it from dropping and breaking. Then I lower the bit onto the PCB, thereby zeroing it. I push it gently down with one finger while I tighten the set screw, otherwise the screw can lift the bit slightly as I fasten it. The traces PNG is loaded in Fab Modules, RML code for Roland Modela mills is selected and PCB traces is selected. We have Fab Modules running locally on an ancient Linux machine and I don't know how to take screenshots on it. Sorry. Then I select the MDX-20 milling machine, click Calculate to generate the toolpath and click Send to send the RML code to the machine. Starting to mill the traces on the Roland MDX-20 machine. Vacuuming the dust away after milling. Fab Modules selections for milling the PCB traces. Generating the toolpath to mill the PCB outline. The interior PNG was loaded this time and PCB outline selected instead of PCB traces. Did you know you can rotate the view in 3D in Fab Modules? It's great! Milling the board outline with a 1/32 inch bit. I changed the bit with the same procedure as before. After another round of vacuuming, the board is ready for soldering! Soldering, then stopping to test the resistor sizing. First I tried a 1kOhm resistor, which made the LED too dim. 
Then I tried a 10Ohm resistor, which was just right. Then I soldered the rest of the resistors and LEDs onto the board. I enjoyed the process, but if I were to make many of these boards, I would start thinking about a Pick-and-Place machine. The LumenPNP seems nice.","title":"PCB milling"},{"location":"assignments/week08.html#led-debugging","text":"I used the LED to troubleshoot my final project. First I made a simple program that blinks all the pins. It helped me to determine which pin on the IC relates to which pin number in the Arduino IDE, using the Arduino Nano 33 IoT board definition (because that's the only Arduino core that worked with my libraries). Blink all the pins! I noticed gaps, where a few pins weren't broken out on the board. I could identify them by slowing the blinking down and having the SAMD21 report the pin number to the serial monitor. Then, when the brushless motor was moving erratically, I added the LED board to the breadboard again to see if I was getting PWM signals on all three motor phases: I got a steady ENABLE signal and two PWM signals. There should also be a PWM signal on the brown wire. I had accidentally used a pin that wasn't broken out on the board for the third PWM signal. I quickly changed the pin in the code and the motor spun smoothly. See more in Final Project: Electronics Design .","title":"LED debugging"},{"location":"assignments/week08.html#pcb-milling-test","text":"","title":"PCB milling test"},{"location":"assignments/week08.html#164th-inch-bit","text":"I made Neil Gershenfeld's PCB milling test using the 1/64th inch flat end mill. I used a local instance of the old Fab Modules running on a Debian Linux laptop. I used the standard speed of 4 mm/s and a cut depth of 0.1 mm. Under \"number of offsets\" I put -1, which means that I want to clear the board completely of copper around the traces. The toolpaths in Fab Modules. I wonder how Neil programmed this from scratch. 
I think these traces came out rather well The depth of cut seems to be good since the copper is completely removed. The edges are relatively smooth, so the milling bit must be in good condition. The very finest traces came loose from the board. That's good to know. It seems to be best not to make traces thinner than 0.3 mm.","title":"1/64th inch bit"},{"location":"assignments/week08.html#001-inch-bit","text":"Then I tried the super thin 0.01 inch flat end mill, and I must admit that I forgot to change the milling speed. So the first attempt was at a fast pace of 4 mm/s. The end mill broke immediately. Then I tried again at a slow speed of 0.5 mm/s and the same cut depth 0.1 mm. It also broke quite quickly. This was frustrating. There are more offsets, since the milling bit is thinner. I broke two 0.01 inch milling bits trying to mill this test file.","title":"0.01 inch bit"},{"location":"assignments/week08.html#eventual-success-with-fine-traces","text":"I waited until the final project to try the 0.01 inch end mill again, then at the very slow speed of 0.1 mm/s. It worked for an hour and then broke in the middle of the night. I documented my frustration in my final project video and in my final project presentation, Neil Gershenfeld mentioned that everything has to be perfect for this milling bit to work. You have to plane the wasteboard, clean the machine, everything has to be just right. And I think I also made the mistake of having it mill all the traces, instead of just around the ICs with the smallest pads. In the end I was able to mill the finest traces on my final project board with a V-bit. Then I cleared the whole board with a 1/64th inch flat end mill and milled the holes and outline with a 1/32 inch flat end mill. Here is the assembled robot joint running a PID control loop: Look at that! It works! 
.md-content__button { display: none; }","title":"Eventual success with fine traces"},{"location":"assignments/week09.html","text":"Output Devices Neil's board I first made Neil Gershenfeld's Hello H-Bridge D11C board without modifications. I simply downloaded the traces and interior PNG files and used them to generate G-code to mill the board. After milling, I removed the extra copper around the USB connector, to avoid a short-circuit. Neil's board in Fab Modules. I like soldering. Maybe you can tell. I like the fact that the SAMD11 is a capable ARM microcontroller that still comes in a package that is easy to solder. After soldering, I plugged the USB connector on the board into my computer and hooked the SWD pins up to the Atmel-ICE programmer. I followed this tutorial from Fab Lab Kannai to try to upload a bootloader to the chip. Trying to program the ATSAMD11C with the official ATMEL-ICE programmer. The microcontroller was not detected. The Microchip Studio software couldn't find a connected device. I tried again, and got a low voltage warning. It said that the operating voltage was 1.5 volts but needed to be 1.6 to 3.8 volts. Well, that's an improvement! Now I have a useful error message that I may be able to do something about. Later. At least the computer is detecting something. Stepper control board Fusion 360 PCB export woes For my own design, I decided to go an easier route. I used the Xiao RP2040 module, which I know how to program. After the incredible fireworks show in the Monday recitation , where the Fab Lab machine builders showed their projects, I looked into the Urumbu boards and Modular Things. Urumbu is based on work at a Fab event in Kerala, where it was discovered that you can connect several stepper control boards directly to a computer and send them synced commands via USB and have them act as a single machine. 
Modular Things grew out of that project, and they include a convenient web editor to program the boards with a few lines of JavaScript. I looked at the Urumbu boards and found that they used the DRV8428 stepper controllers. We only have two of them, and they are currently unavailable at Digikey. However, the latest Modular Things boards use a Xiao RP2040 for control, which I'm comfortable with, and the stepper board uses two A4950 motor controllers, which we have at our Lab. Alright! These boards are designed in Fusion 360 and I want to make some modifications to it. I opened the stepper board in Fusion 360 but I couldn't make heads or tails of the interface of the electronics environment. So I started going through a friendly five video tutorial series on Fusion 360 PCB design and milling. The first video covers making a new design, getting the libraries you need and opening the PCB layout. The video was made only a year ago but the interface has already changed since then. But I was able to follow it. In the second video you make your own library component from scratch. I needed to change the default grid units from mils to millimeters first. That is a global setting, so I should now see millimeters everywhere in the electronics design environment. In the third video, you make a more complicated component from the manufacturer's 3D CAD file and technical drawing. When I made a new component, I had to set the grid units to mm again. Annoying. I followed step 5 on this Fab Academy site to export the circuit as a PNG. That worked well for the traces, but no matter what I tried, I couldn't export the outline of the board. It's always visible in the viewport, even when I turn off all the layers. So instead, I tried opening the 3D PCB and exporting the PCB sketch as a DXF and then turning that into a black and white SVG in Inkscape. That works, except I need to draw a frame around it, so that the milling bit has space to traverse the whole outline. 
But then, how do I make the frame for the traces? I tried to export them as a DXF, but that didn't work. For that, I would need to create another sketch inside the 3D PCB environment and project the traces into it, but that environment only allows you to make one sketch. Then I tried to make an engineering drawing. Only the board outline and the pads appeared but not the traces. And not the frame around the board, because it only exists in a sketch. Then I changed the settings for the model view from Visible Lines to Visible with Hidden Edges, and the traces appeared! But they had broken lines. So I right-clicked the Document Settings at the top of the model tree on the left and looked at the View settings. The hidden lines were drawn with HIDDEN2. I changed that to Continuous, and now I had nice and continuous traces. I exported the drawing as a PDF and opened it in Inkscape. I deleted the CAD drawing frame, which I don't need to make production files. Now I just needed to do some editing to combine the traces with the pads and make one file with only the traces and another one with only the holes and the board outline. I made all the lines red and 0.02 mm wide, just because that's what we usually do before laser cutting and vinyl cutting. I'm used to looking at things like that. I tried turning on Fill, but that didn't come out well. So I had to do some editing. I selected Object -> Ungroup twice to separate all the lines. I thought that I was getting close to production. But a lot of the lines weren't connected. Sigh. I can't use this. Next I tried exporting the CAD drawing as a DXF and opened it in Inkscape, in the hope that it would be more precise. It is more precise, but the lines are still all separate. The pads and traces aren't objects that I can combine. I tried turning Fill on, but had no better luck than with the PDF. To make the background, I made a rectangle with black fill and no stroke, and had it snap to the corners of the broken sketch outline. 
Nicely milled stepper control board. When I was looking through the Arduino code for the stepper H-bridge RP2040, I found the pin that sends a reference voltage to the Toshiba driver. It was in the stepperDriver.cpp file. Connected to the pin was a somewhat cryptic slice_num variable, but from the RP2040 datasheet I remembered that the PWM generators are called slices. From the following lines of code, it seemed that the PWM duty cycle was 15/128, or 12%: cpp // PWM duty cycle over 128 pwm_set_chan_level(slice_num_a, channel_a, 15); pwm_set_chan_level(slice_num_b, channel_b, 15); If I assume that the maximum output voltage of the RP2040 is 3.3V, then 12% of that is 0.396V. I can try to implement this with the Arduino library function analogWrite(pin, value), where value takes a number from 0 (always off) to 255 (always on). 12% of 255 is 31. The DC motor hummed, but didn't move. So I tried 50/255. Then it moved a tiny bit. Next, I tried 80/255. The motor spins! And I don't seem to be overloading the USB port. But the motor is very weak. Let's try 100. Now 120. Now 150. Now 180. Now 200. Now 220. I won't dare to go above that for now. Two DC motors, VREF 80. Works! Let's try 120. I tried 150, 180 and finally 220. I also tried PWM on both motors at the same time. That worked well. Now I'm gearing up for a BLDC motor. But that requires a boost converter to get the voltage from 5 to 12 V. Final project spiral 1 Then I tried Yuichi Tamiya's Modular Things stepper board. Yuichi Tamiya's Modular Things stepper board from the 2023 instructor bootcamp in Amsterdam. I heavily modified Yuichi's board, changing the shape and adding header pins, so that I could use all the Xiao RP2040's pins. I can now connect the brushless motor to the two H-bridges (it needs one and a half H-bridge) and I can connect the encoder to 3.3 V, ground and a digital pin on the Xiao. I bought two sizes of brushless motors, thinking that I would use a bigger motor in the base. 
I based the shape of the board on that. A few days after I designed the shape, I decided to change the orientation of the arm from vertical (regular robot arm) to horizontal (SCARA arm). Then there's no strain on the motors when the arm is stationary and I don't need to use the bigger and more expensive brushless motor. I also decided to put a stepper motor in the base of the first version of the robot, simply because I had managed to make a working stepper RP2040 Modular Thing. The Autorouter in KiCAD. My PCB design My robot joint v1 PCB. I also added one header pin to the current sense resistor, hoping that I can read the current going into the motor. That would be very useful, because it's a way to prevent the H-bridges from overheating (I burned a motor driver on a commercial robot arm once and I want to make it impossible on my arm) and I can also use the measured current as a way to measure the force on the joint. Current sensing is not available on any hobby robot that I know of, so if this works, then it will be a great feature! I also added a 7-11V power input for the brushless motor. Yuichi's stepper board uses the 5V USB pin to power the stepper, but my brushless motor needs a higher voltage. I will just be using a lab power supply for now. I will figure out the arm's power supply later. Does it make sense to add a boost converter? I don't know, converting 230V AC into 5V and then converting 5V into 11V sounds a bit messy to me. Putting Dupont connectors on the motor The power connector that came with the motor is too small for the standard 2.54 mm pin headers in the Fab Lab Inventory, so my instructor \u00de\u00f3rarinn showed me how to crimp Dupont connectors onto the wires. Part 1 Part 2 Part 3 Part 4 Part 5 Small connector. \u00de\u00f3rarinn's Dupont connector kit. Aligning a female Dupont connector to the wire. The first crimp connection grabs the plastic cover and the second one grabs the bare wire and secures an electrical connection. 
Crimping the connector onto the wire. More recently I've started to use narrow nose pliers instead. Then I can control exactly how the crimping goes and I don't waste as many Dupont connectors. Triple Dupont connector, ready for service. PCB production Under number of offsets (off screen) I typed -1, to have the milling machine clear all the excess copper off the board. I thought this was the safest move, since I'll be putting a BLDC motor with an aluminum chassis onto the board. That's a nice-looking board. The components for arm joint v1, with a general comment on component labeling. Scaling problem The holes for the brushless motor screws were too far apart. How could that be? I exported the arm profile with the holes directly to DXF from Fusion 360, imported them into KiCAD and then exported to SVG without modifications. My instructor \u00de\u00f3rarinn suggested that my DPI settings in Inkscape and Fab Modules might be off. If you check the Fab Modules image, you'll see that the resolution was automatically set to 999.99 dots per inch, instead of 1000. Oh no, torn motor pins! I tore the motor pins off the board when I was trying to insert the connector. The copper also came off the board. This was a design lesson: you have to put the connectors all the way at the edge of the board! I don't know what I was thinking. This was very frustrating. I had to stop working, cool off and come back the next day. With a level head, I thought that I might actually be able to save this board using the adhesive-backed copper sheet that I use on the vinyl cutter. The fix Part 1 Part 2 Part 3 Part 4 Part 5 First I cut the MOTOR OUTPUT and CURRENT SENSE letters off the board with a box cutter. Then I tried cutting a strip of copper sheet and I successfully glued it onto the board. Copper sheet added for the other three motor phases. Then I carefully soldered the horizontal header pins onto the copper sheet and made a solder bridge from the sheets to the traces on the board. 
Finally I added some hot glue to add a little bit of strength. Robot base The 3D printed base for spiral 1 of my robot arm. The support material came easily away from the part in one piece. Neat! Here's the Stepper RP2040 Modular Thing that I made for the stepper in the base of the arm. Look closely and you'll see the tiny white TPU washers that I made to avoid making contact between the screws and the traces. Stepper control with my board Driving a stepper from the Modular Things web interface using my arm joint control board. When testing my arm joint v1 with a stepper motor, I accidentally ripped the stepper motor pin header off the board and took some of the traces along with it. A current sense header pin also fell off the board. I decided to call it quits with making stuff for the day, went to the Heimabygg\u00f0 coffee house and wrote up my experiences. With fresh eyes (and a fresh espresso) at the lab the next morning, I thought of a way to fix the board. I would cut strips of adhesive-backed copper sheet and glue new traces onto the board. I soldered them to the remains of the old traces on one end and to the header pins on the other end, and after a bit of troubleshooting, the board worked! I've tried 247, 427, 274, 472, 742, 724 - that covers all possible \\(3! = 6\\) combinations. I'm getting PWM output on Xiao pins 0, 2 and 4. Now I know that the right pins are 7, 2 and 4. I get good PWM output for the motor from pins 2 and 4 but I get the strange sawtooth output from pin 7. LED PWM test Before trying to move the brushless motor, I checked whether I was getting a sinusoidal PWM on three output pins. I did this by outputting the motor control signals to the RGB LED that is built into the Xiao RP2040 board: Seems to be working! BLDC control with my board Robot arm spiral 1. Here I'm controlling the BLDC with sinusoidal PWM signals: Getting some erratic behavior. This code worked with the L298N stepper driver. 
After trying a few different speeds and voltages, I finally got the motor to spin around in a circle in the last shot. Debugging The brushless motor moved erratically no matter what I tried. I wondered if I had soldered the wrong capacitors onto the board. I tried to measure them with a component tester: Trying to measure a capacitor with a component tester. I couldn't get a reading with the component tester. Eventually I decided that I must have put the right capacitors on the board because I was so systematic and methodical in soldering the board. Finally, I tried lowering the power supply voltage to 5V. The motor still worked. Then I switched the motor over to Yuichi's Modular Things stepper driver and found erratic behavior there too. It seems that this Toshiba motor driver just doesn't cut it. I then connected the motor to the ancient L298N double H-bridge and it worked! OK, so the Toshiba H-bridge is out and I need to look for an alternative. Looking at the signals from the H-bridges. The board can control a stepper just fine. When trying to control a brushless motor, one H-bridge is a problem. It's the one that has only one pin connected. It seems that these motor drivers don't have independent half-H-bridges, which is what I need for brushless motor control. I'm going to abandon this board. I also noticed a lot of compliance in the structure. It seems to stem mostly from the stepper coupling that I designed. This is something I can improve in the next spiral. See the arm bending here: Measuring the power of an output device I measured the power use of an OLED screen. First, I measured the voltage over the component. That means that one lead of the multimeter is on the \"hot\" side of the component and the other lead is connected to ground on the other side. The OLED must be powered on for the voltage measurement to work. Measuring the voltage that the OLED screen gets. Then I measured the current that the OLED screen uses. 
I needed to break the circuit and insert the multimeter into the circuit on the \"hot\" side, in order to measure the current flowing through the OLED. Inside the multimeter is a resistor with very low resistance. The multimeter measures the voltage drop over the resistor and uses that value and the resistance to calculate the current using Ohm's Law. On the left, the potentiometer is turned all the way down, so the bar is black. On the right the pot is turned all the way up, so the bar is white. There is a clear difference in the current reading. Measuring roughly the maximum current that the OLED screen uses. About 90% of the OLED screen is illuminated here. To calculate the power consumption, I'll use the power formula: \\[ \\mathrm{P} = \\mathrm{V}\\!\\cdot\\!\\mathrm{I}\\] Potentiometer set to 0%: \\[\\mathrm{P_{0\\%}}=\\mathrm{V}\\!\\cdot\\!\\mathrm{I_{0\\%}}=4.6V\\!\\cdot\\!0.004A=\\underline{0.0184W}\\] Potentiometer set to 100%: \\[\\mathrm{P_{100\\%}} = \\mathrm{V}\\!\\cdot\\!\\mathrm{I_{100\\%}} = 4.6V\\!\\cdot\\!0.008A = \\underline{0.0368W}\\] Maximum OLED power consumption: \\[\\mathrm{P_{max}} = \\mathrm{V}\\!\\cdot\\!\\mathrm{I_{max}} = 4.6V\\!\\cdot\\!0.024A = \\underline{0.1104W}\\] .md-content__button { display: none; }","title":"9. Output Devices"},{"location":"assignments/week09.html#output-devices","text":"","title":"Output Devices   "},{"location":"assignments/week09.html#neils-board","text":"I first made Neil Gershenfeld's Hello H-Bridge D11C board without modifications. I simply downloaded the traces and interior PNG files and used them to generate G-code to mill the board. After milling, I removed the extra copper around the USB connector, to avoid a short-circuit. Neil's board in Fab Modules. I like soldering. Maybe you can tell. I like the fact that the SAMD11 is a capable ARM microcontroller that still comes in a package that is easy to solder. 
After soldering, I plugged the USB connector on the board into my computer and hooked the SWD pins up to the Atmel-ICE programmer. I followed this tutorial from Fab Lab Kannai to try to upload a bootloader to the chip. Trying to program the ATSAMD11C with the official ATMEL-ICE programmer. The microcontroller was not detected. The Microchip Studio software couldn't find a connected device. I tried again, and got a low voltage warning. It said that the operating voltage was 1.5 volts but needed to be 1.6 to 3.8 volts. Well, that's an improvement! Now I have a useful error message that I may be able to do something about. Later. At least the computer is detecting something.","title":"Neil's board"},{"location":"assignments/week09.html#stepper-control-board","text":"","title":"Stepper control board"},{"location":"assignments/week09.html#fusion-360-pcb-export-woes","text":"For my own design, I decided to go an easier route. I used the Xiao RP2040 module, which I know how to program. After the incredible fireworks show in the Monday recitation , where the Fab Lab machine builders showed their projects, I looked into the Urumbu boards and Modular Things. Urumbu is based on work at a Fab event in Kerala, where it was discovered that you can connect several stepper control boards directly to a computer and send them synced commands via USB and have them act as a single machine. Modular Things grew out of that project, and they include a convenient web editor to program the boards with a few lines of JavaScript. I looked at the Urumbu boards and found that they used the DRV8428 stepper controllers. We only have two of them, and they are currently unavailable at Digikey. However, the latest Modular Things boards use a Xiao RP2040 for control, which I'm comfortable with, and the stepper board uses two A4950 motor controllers, which we have at our Lab. Alright! These boards are designed in Fusion 360 and I want to make some modifications to it. 
I opened the stepper board in Fusion 360 but I couldn't make heads or tails of the interface of the electronics environment. So I started going through a friendly five video tutorial series on Fusion 360 PCB design and milling. The first video covers making a new design, getting the libraries you need and opening the PCB layout. The video was made only a year ago but the interface has already changed since then. But I was able to follow it. In the second video you make your own library component from scratch. I needed to change the default grid units from mils to millimeters first. That is a global setting, so I should now see millimeters everywhere in the electronics design environment. In the third video, you make a more complicated component from the manufacturer's 3D CAD file and technical drawing. When I made a new component, I had to set the grid units to mm again. Annoying. I followed step 5 on this Fab Academy site to export the circuit as a PNG. That worked well for the traces, but no matter what I tried, I couldn't export the outline of the board. It's always visible in the viewport, even when I turn off all the layers. So instead, I tried opening the 3D PCB and exporting the PCB sketch as a DXF and then turning that into a black and white SVG in Inkscape. That works, except I need to draw a frame around it, so that the milling bit has space to traverse the whole outline. But then, how do I make the frame for the traces? I tried to export them as a DXF, but that didn't work. For that, I would need to create another sketch inside the 3D PCB environment and project the traces into it, but that environment only allows you to make one sketch. Then I tried to make an engineering drawing. Only the board outline and the pads appeared but not the traces. And not the frame around the board, because it only exists in a sketch. Then I changed the settings for the model view from Visible Lines to Visible with Hidden Edges, and the traces appeared! 
But they had broken lines. So I right-clicked the Document Settings at the top of the model tree on the left and looked at the View settings. The hidden lines were drawn with HIDDEN2. I changed that to Continuous, and now I had nice and continuous traces. I exported the drawing as a PDF and opened it in Inkscape. I deleted the CAD drawing frame, which I don't need to make production files. Now I just needed to do some editing to combine the traces with the pads and make one file with only the traces and another one with only the holes and the board outline. I made all the lines red and 0.02 mm wide, just because that's what we usually do before laser cutting and vinyl cutting. I'm used to looking at things like that. I tried turning on Fill, but that didn't come out well. So I had to do some editing. I selected Object -> Ungroup twice to separate all the lines. I thought that I was getting close to production. But a lot of the lines weren't connected. Sigh. I can't use this. Next I tried exporting the CAD drawing as a DXF and opened it in Inkscape, in the hope that it would be more precise. It is more precise, but the lines are still all separate. The pads and traces aren't objects that I can combine. I tried turning Fill on, but had no better luck than with the PDF. To make the background, I made a rectangle with black fill and no stroke, and had it snap to the corners of the broken sketch outline. Nicely milled stepper control board. When I was looking through the Arduino code for the stepper H-bridge RP2040, I found the pin that sends a reference voltage to the Toshiba driver. It was in the stepperDriver.cpp file. Connected to the pin was a somewhat cryptic slice_num variable, but from the RP2040 datasheet I remembered that the PWM generators are called slices. 
From the following lines of code, it seemed that the PWM duty cycle was 15/128, or 12%: cpp // PWM duty cycle over 128 pwm_set_chan_level(slice_num_a, channel_a, 15); pwm_set_chan_level(slice_num_b, channel_b, 15); If I assume that the maximum output voltage of the RP2040 is 3.3V, then 12% of that is 0,396V. I can try to implement this with the Arduino library function analogWrite(pin, value), where value takes a number from 0 (always off) to 255 (always on). 12% of 255 is 31. The DC motor hummed, but didn't move. So I tried 50/255. Then it moved a tiny bit. Next, I tried 80/255. The motor spins! And I don't seem to be overloading the USB port. But the motor is very weak. Let's try 100. Now 120. Now 150. Now 180. Now 200. Now 220. I won't dare to go above that for now. Two DC motors, VREF 80. Works! Let's try 120. I tried 150, 180 and finally 220. I also tried PWM on both motors at the same time. That worked well. Now I'm gearing up for a BLDC motor. But that requires a boost converter to get the voltage from 5 to 12 V.","title":"Fusion 360 PCB export woes"},{"location":"assignments/week09.html#final-project-spiral-1","text":"Then I tried Yuichi Tamiya's Modular Things stepper board. Yuichi Tamiya's Modular Things stepper board from the 2023 instructor bootcamp in Amsterdam. I heavily modified Yuichi's board, changing the shape and adding header pins, so that I could use all the Xiao RP2040's pins. I can now connect the brushless motor to the two H-bridges (it needs one and a half H-bridge) and I can connect the encoder to 3.3 V, ground and a digital pin on the Xiao. I bought two sizes of brushless motors, thinking that I would use a bigger motor in the base. I based the shape of the board on that. A few days after I designed the shape, I decided to change the orientation of the arm from vertical (regular robot arm) to horizontal (SCARA arm). 
Then there's no strain on the motors when the arm is stationary and I don't need to use the bigger and more expensive brushless motor. I also decided to put a stepper motor in the base of the first version of the robot, simply because I had managed to make a working stepper RP2040 Modular Thing. The Autorouter in KiCAD.","title":"Final project spiral 1"},{"location":"assignments/week09.html#my-pcb-design","text":"My robot joint v1 PCB. I also added one header pin to the current sense resistor, hoping that I can read the current going into the motor. That would be very useful, because it's a way to prevent the H-bridges from overheating (I burned a motor driver on a commercial robot arm once and I want to make it impossible on my arm) and I can also use the measured current as a way to measure the force on the joint. Current sensing is not available on any hobby robot that I know of, so if this works, then it will be a great feature! I also added a 7-11V power input for the brushless motor. Yuichi's stepper board uses the 5V USB pin to power the stepper, but my brushless motor needs a higher voltage. I will just be using a lab power supply for now. I will figure out the arm's power supply later. Does it make sense to add a boost converter? I don't know, converting 230V AC into 5V and then converting 5V into 11V sounds a bit messy to me.","title":"My PCB design"},{"location":"assignments/week09.html#putting-dupont-connectors-on-the-motor","text":"The power connector that came with the motor is too small for the standard 2.54 mm pin headers in the Fab Lab Inventory, so my instructor \u00de\u00f3rarinn showed me how to crimp Dupont connectors onto the wires. Part 1 Part 2 Part 3 Part 4 Part 5 Small connector. \u00de\u00f3rarinn's Dupont connector kit. Aligning a female Dupont connector to the wire. The first crimp connection grabs the plastic cover and the second one grabs the bare wire and secures an electrical connection. Crimping the connector onto the wire. 
More recently I've started to use narrow nose pliers instead. Then I can control exactly how the crimping goes and I don't waste as many Dupont connectors. Triple Dupont connector, ready for service.","title":"Putting Dupont connectors on the motor"},{"location":"assignments/week09.html#pcb-production","text":"Under number of offsets (off screen) I typed -1, to have the milling machine clear all the excess copper off the board. I thought this was the safest move, since I'll be putting a BLDC motor with an aluminum chassis onto the board. That's a nice-looking board. The components for arm joint v1, with a general comment on component labeling.","title":"PCB production"},{"location":"assignments/week09.html#scaling-problem","text":"The holes for the brushless motor screws were too far apart. How could that be? I exported the arm profile with the holes directly to DXF from Fusion 360, imported them into KiCAD and then exported to SVG without modifications. My instructor \u00de\u00f3rarinn suggested that my DPI settings in Inkscape and Fab Modules might be off. If you check the Fab Modules image, you'll see that the resolution was automatically set to 999.99 dots per inch, instead of 1000.","title":"Scaling problem"},{"location":"assignments/week09.html#oh-no-torn-motor-pins","text":"I tore the motor pins off the board when I was trying to insert the connector. The copper also came off the board. This was a design lesson: you have to put the connectors all the way at the edge of the board! I don't know what I was thinking. This was very frustrating. I had to stop working, cool off and come back the next day. With a level head, I thought that I might actually be able to save this board using the adhesive-backed copper sheet that I use on the vinyl cutter.","title":"Oh no, torn motor pins!"},{"location":"assignments/week09.html#the-fix","text":"Part 1 Part 2 Part 3 Part 4 Part 5 First I cut the MOTOR OUTPUT and CURRENT SENSE letters off the board with a box cutter. 
Then I tried cutting a strip of copper sheet and I successfully glued it onto the board. Copper sheet added for the other three motor phases. Then I carefully soldered the horizontal header pins onto the copper sheet and made a solder bridge from the sheets to the traces on the board. Finally I added some hot glue to add a little bit of strength.","title":"The fix"},{"location":"assignments/week09.html#robot-base","text":"The 3D printed base for spiral 1 of my robot arm. The support material came easily away from the part in one piece. Neat! Here's the Stepper RP2040 Modular Thing that I made for the stepper in the base of the arm. Look closely and you'll see the tiny white TPU washers that I made to avoid making contact between the screws and the traces.","title":"Robot base"},{"location":"assignments/week09.html#stepper-control-with-my-board","text":"Driving a stepper from the Modular Things web interface using my arm joint control board. When testing my arm joint v1 with a stepper motor, I accidentally ripped the stepper motor pin header off the board and took some of the traces along with it. A current sense header pin also fell off the board. I decided to call it quits with making stuff for the day, went to the Heimabygg\u00f0 coffee house and wrote up my experiences. With fresh eyes (and a fresh espresso) at the lab the next morning, I thought of a way to fix the board. I would cut strips of adhesive-backed copper sheet and glue new traces onto the board. I soldered them to the remains of the old traces on one end and to the header pins on the other end, and after a bit of troubleshooting, the board worked! I've tried 247, 427, 274, 472, 742, 724 - that covers all possible \\(3! = 6\\) combinations. I'm getting PWM output on Xiao pins 0, 2 and 4. Now I know that the right pins are 7, 2 and 4. 
I get good PWM output for the motor from pins 2 and 4 but I get the strange sawtooth output from pin 7.","title":"Stepper control with my board"},{"location":"assignments/week09.html#led-pwm-test","text":"Before trying to move the brushless motor, I checked whether I was getting a sinusoidal PWM on three output pins. I did this by outputting the motor control signals to the RGB LED that is built into the Xiao RP2040 board: Seems to be working!","title":"LED PWM test"},{"location":"assignments/week09.html#bldc-control-with-my-board","text":"Robot arm spiral 1. Here I'm controlling the BLDC with sinusoidal PWM signals: Getting some erratic behavior. This code worked with the L298N stepper driver. After trying a few different speeds and voltages, I finally got the motor to spin around in a circle in the last shot.","title":"BLDC control with my board"},{"location":"assignments/week09.html#debugging","text":"The brushless motor moved erratically no matter what I tried. I wondered if I had soldered the wrong capacitors onto the board. I tried to measure them with a component tester: Trying to measure a capacitor with a component tester. I couldn't get a reading with the component tester. Eventually I decided that I must have put the right capacitors on the board because I was so systematic and methodical in soldering the board. Finally, I tried lowering the power supply voltage to 5V. The motor still worked. Then I switched the motor over to Yuichi's Modular Things stepper driver and found erratic behavior there too. It seems that this Toshiba motor driver just doesn't cut it. I then connected the motor to the ancient L298N double H-bridge and it worked! OK, so the Toshiba H-bridge is out and I need to look for an alternative. Looking at the signals from the H-bridges. The board can control a stepper just fine. When trying to control a brushless motor, one H-bridge is a problem. It's the one that has only one pin connected. 
It seems that these motor drivers don't have independent half-H-bridges, which is what I need for brushless motor control. I'm going to abandon this board. I also noticed a lot of compliance in the structure. It seems to stem mostly from the stepper coupling that I designed. This is something I can improve in the next spiral. See the arm bending here:","title":"Debugging"},{"location":"assignments/week09.html#measuring-the-power-of-an-output-device","text":"I measured the power use of an OLED screen. First, I measured the voltage over the component. That means that one lead of the multimeter is on the \"hot\" side of the component and the other lead is connected to ground on the other side. The OLED must be powered on for the voltage measurement to work. Measuring the voltage that the OLED screen gets. Then I measured the current that the OLED screen uses. I needed to break the circuit and insert the multimeter into the circuit on the \"hot\" side, in order to measure the current flowing through the OLED. Inside the multimeter is a resistor with very low resistance. The multimeter measures the voltage drop over the resistor and uses that value and the resistance to calculate the current using Ohm's Law. On the left, the potentiometer is turned all the way down, so the bar is black. On the right the pot is turned all the way up, so the bar is white. There is a clear difference in the current reading. Measuring roughly the maximum current that the OLED screen uses. About 90% of the OLED screen is illuminated here. 
To calculate the power consumption, I'll use the power formula: \\[ \\mathrm{P} = \\mathrm{V}\\!\\cdot\\!\\mathrm{I}\\] Potentiometer set to 0%: \\[\\mathrm{P_{0\\%}}=\\mathrm{V}\\!\\cdot\\!\\mathrm{I_{0\\%}}=4.6V\\!\\cdot\\!0.004A=\\underline{0.0184W}\\] Potentiometer set to 100%: \\[\\mathrm{P_{100\\%}} = \\mathrm{V}\\!\\cdot\\!\\mathrm{I_{100\\%}} = 4.6V\\!\\cdot\\!0.008A = \\underline{0.0368W}\\] Maximum OLED power consumption: \\[\\mathrm{P_{max}} = \\mathrm{V}\\!\\cdot\\!\\mathrm{I_{max}} = 4.6V\\!\\cdot\\!0.024A = \\underline{0.1104W}\\] .md-content__button { display: none; }","title":"Measuring the power of an output device"},{"location":"assignments/week10.html","text":"Machine Week The Icelandic Machine Week team: Svavar, Andri and Hafey. Machine Week was fantastic. Check out our project page here: MACHINE WEEK PAGE For an intense three-day weekend, Andri S\u00e6mundsson , Hafey Viktor\u00eda Hallgr\u00edmsd\u00f3ttir and myself joined forces in Fab Lab Reykjav\u00edk to build a machine. Here's our presentation video: The build process and function of the TeaManator tea steeping machine. But it's better to watch the video with commentary from Hafey: Hafey presenting the TeaManator tea machine to Neil Gershenfeld and the Fab Academy class 2023. Neil liked how the machine is nicely finished and presented. Link to video. The clean aesthetics of the machine are thanks to Hafey, who designed and made the structure on the Shopbot and the laser cutter. She also made the logo on the vinyl cutter, which means that we used all the machines in the Fab Lab! Travel arrangements Our instructors, \u00c1rni Bj\u00f6rnsson and \u00de\u00f3rarinn Bjartur Brei\u00f0fj\u00f6r\u00f0 Gunnarsson came to support us during this intense session of work. \u00de\u00f3rarinn and I flew in from \u00cdsafj\u00f6r\u00f0ur and \u00c1rni flew in from Akureyri. Landing in \u00cdsafj\u00f6r\u00f0ur is more exhilarating than many people would like. 
\u00cdsafj\u00f6r\u00f0ur is one of the most difficult airports in Iceland. When the plane comes in, it needs to take a sharp U-turn at the bottom of the fjord before landing on the tarmac. People are often quite shocked, and I heard that two of the designers of the Dutch Fokker aircraft that used to fly this route came along for the ride once and commented that the aircraft was not designed for this kind of thing. If there's even a little bit of side wind toward the mountain, the flight is cancelled. Fortunately, the weather gods smiled upon us this time. Fab Lab Reykjav\u00edk On arriving in Fab Lab Reykjav\u00edk, I discovered that I was one of the orphaned 3D scans on display in the lobby. No long hair or beard back then. I made this with a Kinect 3D scanner many years ago with the help of Linda and Bas Withagen, soon after Fab Lab Reykjav\u00edk first opened. Then the Fab Lab was in Eddufell, in a small commercial center. I spent a lot of time there right after it opened. Good times. I wanted to take many more pictures inside Fab Lab Reykjav\u00edk, because they have designed and fabricated all sorts of clever solutions that I would like to replicate in Fab Lab \u00cdsafj\u00f6r\u00f0ur. But there was no time! Teamwork We were all super productive the whole time. We worked from nine in the mornings to about ten in the evenings, and we made the machine work just in time to go back home. I'm really happy with my teammates and our instructors. \u00c1rni, Svavar, Hafey, Andri and \u00de\u00f3rarinn. The concept Why is the machine called TeaManator? Because my instructor \u00de\u00f3rarinn thought we should use something distinctive as an end stop: He added the tongue in Blender. Andri made some concept art for our machine using a Generative Pre-trained Transformer: Andri used GPT-enabled Bing to generate the images. Spiral 1 My job was to make the motors work. 
The first spiral was to control a stepper motor using an Arduino Uno and an L298N double H-bridge motor controller. I also added a servo to the Arduino Uno. The ATMega328 chip on the Uno is a bit old and outdated, but the Arduino Uno is still the best documented and supported microcontroller board in the world. So it's a good place to get something basic working. Machine control spiral 1: An L298N stepper driver module on a breadboard with a SparkFun RedBoard, which is essentially the same thing as an Arduino Uno. Spiral 2 The second spiral was to make a custom board with a Xiao RP2040 microcontroller module, a DRV8825 StepStick motor controller, 12V input pins from a lab power supply and GPIO breakout pins for a servo and two buttons. In Machine Week I made my biggest leap forward in electronics design. I also developed my methodical method of breadboarding and testing the hardware and software, one element at a time, before transferring the design over to KiCAD. Machine control spiral 2 on a breadboard: A DRV8825 StepStick stepper driver module. I first connected it to the SparkFun RedBoard but when I had that working I switched to the Xiao RP2040 module. PCB milling The custom board that I made is an extension of spiral 2. The first board had mounting holes that were far too big and the board also came loose during milling. Bad board and good board. I input a radius for the mounting holes when I thought I was defining their diameter. So the holes are huge on the board on the left! And I didn't use enough double-sided tape to secure the PCB blank, so it shifted during the trace milling operation. After a redesign, the second attempt to mill the board went well. The board on the right is the final version of the TeaManator controller board. I forgot the drilling holes for the electrolytic bulk storage capacitor for the stepper driver, so I drilled those holes afterwards. Hand drilling the last two holes in the board. 
I only had through-hole female headers, so I bent their legs in order to be able to surface mount them. Bending the legs for surface mounting. The final TeaManator machine controller. I learned a lesson in considering which side through-hole components need to be on in order to touch the copper traces. Close-up of the machine controller, all connected up. The TeaManator 2000 tea steeping machine. Clean and elegant. The reality behind the facade. The USB hub (with AC adapter) and the big power supply are only there to supply 5V to the Xiao and 12V to the stepper driver, because we didn't have time to learn how to make a power supply. I realized then that I need to think seriously about a neat way to power my final project. Design files Here are the KiCAD, SVG and PNG files for the TeaManator machine control board. I'm also including all the incremental test code. Other design files are at the bottom of our group project page . Download TeaManator KiCAD Download tea_machine.ino (final code) Download machine_week_nema17_l298n_test.ino Download machine_week_nema17_l298n_servo_test.ino Download machine_week_nema17_and_servo.ino Download machine_week_xiao_servo.ino Download machine_week_nema17_drv8825_test.ino Download machine_week_nema17_drv8825_test-svavars_board.ino Download machine_week_nema17_and_servo-svavars_board.ino Download machine_week_button_test.ino Spiral 3 The first Stepper Modular Thing that Andri made. He had to do considerable editing to make the circuit fabricatable. After 3D printing all the parts for the linear motion axis , Andri worked tirelessly on the third spiral, which was to make a Modular Things stepper controller board . We were really excited about this solution, but this design has really thin traces and two layers, making it difficult to make. Andri made lots of modifications to the design in Fusion 360 in order to make the traces thicker and reroute everything so that it fit on one layer. 
He successfully milled the board and soldered all the components to it, but it didn't work. Unfortunately, the motor drivers in the original design need a higher voltage than 5V to work. The designer, Quentin Bols\u00e9e was very responsive to our questions. Then our instructors \u00c1rni and \u00de\u00f3rarinn, recalled that Yuichi Tamiya at Fab Lab Kannai made a working version of the Modular Things stepper control board at the 2023 instructor bootcamp in Amsterdam. Andri proceeded to make the board and \u00c1rni soldered the components onto it. It worked on the first try! The second Stepper Modular Thing that Andri made. Wow! It's powered by the USB port! We didn't have time to integrate it into our machine, but I'm very glad that Andri and our instructors were able to make a working Modular Thing. Seeing the Stepper Modular Thing working and being able to make the machine control board successfully combined to give me the confidence to try to make a robot arm joint immediately when I arrived back in \u00cdsafj\u00f6r\u00f0ur. See more info in Output Devices week . Our instructors We are lucky to have these instructors. Here are some images to prove it: Left: \u00de\u00f3rarinn explaining some electronics to Hafey. Right: Andri showing \u00c1rni the teabag holder. Attention: Coffee for Svavar! Our instructors took good care of us during this intense work session. My instructor \u00de\u00f3rarinn also disassembled a stepper motor and showed us how it works: Left: The stepper rotor, with its many iron teeth visible. Right: The step signal that turns the rotor (two of the phases are connected to the oscilloscope). .md-content__button { display: none; }","title":"10. Machine Week"},{"location":"assignments/week10.html#machine-week","text":"The Icelandic Machine Week team: Svavar, Andri and Hafey. Machine Week was fantastic. 
Check out our project page here: MACHINE WEEK PAGE For an intense three-day weekend, Andri S\u00e6mundsson , Hafey Viktor\u00eda Hallgr\u00edmsd\u00f3ttir and myself joined forces in Fab Lab Reykjav\u00edk to build a machine. Here's our presentation video: The build process and function of the TeaManator tea steeping machine. But it's better to watch the video with commentary from Hafey: Hafey presenting the TeaManator tea machine to Neil Gershenfeld and the Fab Academy class 2023. Neil liked how the machine is nicely finished and presented. Link to video. The clean aesthetics of the machine are thanks to Hafey, who designed and made the structure on the Shopbot and the laser cutter. She also made the logo on the vinyl cutter, which means that we used all the machines in the Fab Lab!","title":"Machine Week   "},{"location":"assignments/week10.html#travel-arrangements","text":"Our instructors, \u00c1rni Bj\u00f6rnsson and \u00de\u00f3rarinn Bjartur Brei\u00f0fj\u00f6r\u00f0 Gunnarsson came to support us during this intense session of work. \u00de\u00f3rarinn and I flew in from \u00cdsafj\u00f6r\u00f0ur and \u00c1rni flew in from Akureyri. Landing in \u00cdsafj\u00f6r\u00f0ur is more exhilarating than many people would like. \u00cdsafj\u00f6r\u00f0ur is one of the most difficult airports in Iceland. When the plane comes in, it needs to take a sharp U-turn at the bottom of the fjord before landing on the tarmac. People are often quite shocked, and I heard that two of the designers of the Dutch Fokker aircraft that used to fly this route came along for the ride once and commented that the aircraft was not designed for this kind of thing. If there's even a little bit of side wind toward the mountain, the flight is cancelled. 
Fortunately, the weather gods smiled upon us this time.","title":"Travel arrangements"},{"location":"assignments/week10.html#fab-lab-reykjavik","text":"On arriving in Fab Lab Reykjav\u00edk, I discovered that I was one of the orphaned 3D scans on display in the lobby. No long hair or beard back then. I made this with a Kinect 3D scanner many years ago with the help of Linda and Bas Withagen, soon after Fab Lab Reykjav\u00edk first opened. Then the Fab Lab was in Eddufell, in a small commercial center. I spent a lot of time there right after it opened. Good times. I wanted to take many more pictures inside Fab Lab Reykjav\u00edk, because they have designed and fabricated all sorts of clever solutions that I would like to replicate in Fab Lab \u00cdsafj\u00f6r\u00f0ur. But there was no time!","title":"Fab Lab Reykjav\u00edk"},{"location":"assignments/week10.html#teamwork","text":"We were all super productive the whole time. We worked from nine in the mornings to about ten in the evenings, and we made the machine work just in time to go back home. I'm really happy with my teammates and our instructors. \u00c1rni, Svavar, Hafey, Andri and \u00de\u00f3rarinn.","title":"Teamwork"},{"location":"assignments/week10.html#the-concept","text":"Why is the machine called TeaManator? Because my instructor \u00de\u00f3rarinn thought we should use something distinctive as an end stop: He added the tongue in Blender. Andri made some concept art for our machine using a Generative Pre-trained Transformer: Andri used GPT-enabled Bing to generate the images.","title":"The concept"},{"location":"assignments/week10.html#spiral-1","text":"My job was to make the motors work. The first spiral was to control a stepper motor using an Arduino Uno and an L298N double H-bridge motor controller. I also added a servo to the Arduino Uno. The ATMega328 chip on the Uno is a bit old and outdated, but the Arduino Uno is still the best documented and supported microcontroller board in the world. 
So it's a good place to get something basic working. Machine control spiral 1: An L298N stepper driver module on a breadboard with a SparkFun RedBoard, which is essentially the same thing as an Arduino Uno.","title":"Spiral 1"},{"location":"assignments/week10.html#spiral-2","text":"The second spiral was to make a custom board with a Xiao RP2040 microcontroller module, a DRV8825 StepStick motor controller, 12V input pins from a lab power supply and GPIO breakout pins for a servo and two buttons. In Machine Week I made my biggest leap forward in electronics design. I also developed my methodical method of breadboarding and testing the hardware and software, one element at a time, before transferring the design over to KiCAD. Machine control spiral 2 on a breadboard: A DRV8825 StepStick stepper driver module. I first connected it to the SparkFun RedBoard but when I had that working I switched to the Xiao RP2040 module.","title":"Spiral 2"},{"location":"assignments/week10.html#pcb-milling","text":"The custom board that I made is an extension of spiral 2. The first board had mounting holes that were far too big and the board also came loose during milling. Bad board and good board. I input a radius for the mounting holes when I thought I was defining their diameter. So the holes are huge on the board on the left! And I didn't use enough double-sided tape to secure the PCB blank, so it shifted during the trace milling operation. After a redesign, the second attempt to mill the board went well. The board on the right is the final version of the TeaManator controller board. I forgot the drilling holes for the electrolytic bulk storage capacitor for the stepper driver, so I drilled those holes afterwards. Hand drilling the last two holes in the board. I only had through-hole female headers, so I bent their legs in order to be able to surface mount them. Bending the legs for surface mounting. The final TeaManator machine controller. 
I learned a lesson in considering which side through-hole components need to be on in order to touch the copper traces. Close-up of the machine controller, all connected up. The TeaManator 2000 tea steeping machine. Clean and elegant. The reality behind the facade. The USB hub (with AC adapter) and the big power supply are only there to supply 5V to the Xiao and 12V to the stepper driver, because we didn't have time to learn how to make a power supply. I realized then that I need to think seriously about a neat way to power my final project.","title":"PCB milling"},{"location":"assignments/week10.html#design-files","text":"Here are the KiCAD, SVG and PNG files for the TeaManator machine control board. I'm also including all the incremental test code. Other design files are at the bottom of our group project page . Download TeaManator KiCAD Download tea_machine.ino (final code) Download machine_week_nema17_l298n_test.ino Download machine_week_nema17_l298n_servo_test.ino Download machine_week_nema17_and_servo.ino Download machine_week_xiao_servo.ino Download machine_week_nema17_drv8825_test.ino Download machine_week_nema17_drv8825_test-svavars_board.ino Download machine_week_nema17_and_servo-svavars_board.ino Download machine_week_button_test.ino","title":"Design files"},{"location":"assignments/week10.html#spiral-3","text":"The first Stepper Modular Thing that Andri made. He had to do considerable editing to make the circuit fabricatable. After 3D printing all the parts for the linear motion axis , Andri worked tirelessly on the third spiral, which was to make a Modular Things stepper controller board . We were really excited about this solution, but this design really thin traces and two layers, making it difficult to make. Andri made lots of modifications to the design in Fusion 360 in order to make the traces thicker and reroute everything so that it fit on one layer. He successfully milled the board and soldered all the components to it, but it didn't work. 
Unfortunately, the motor drivers in the original design need a higher voltage than 5V to work. The designer, Quentin Bols\u00e9e was very responsive to our questions. Then our instructors \u00c1rni and \u00de\u00f3rarinn, recalled that Yuichi Tamiya at Fab Lab Kannai made a working version of the Modular Things stepper control board at the 2023 instructor bootcamp in Amsterdam. Andri proceeded to make the board and \u00c1rni soldered the components onto it. It worked on the first try! The second Stepper Modular Thing that Andri made. Wow! It's powered by the USB port! We didn't have time to integrate it into our machine, but I'm very glad that Andri and our instructors were able to make a working Modular Thing. Seeing the Stepper Modular Thing working and being able to make the machine control board successfully combined to give me the confidence to try to make a robot arm joint immediately when I arrived back in \u00cdsafj\u00f6r\u00f0ur. See more info in Output Devices week .","title":"Spiral 3"},{"location":"assignments/week10.html#our-instructors","text":"We are lucky to have these instructors. Here are some images to prove it: Left: \u00de\u00f3rarinn explaining some electronics to Hafey. Right: Andri showing \u00c1rni the teabag holder. Attention: Coffee for Svavar! Our instructors took good care of us during this intense work session. My instructor \u00de\u00f3rarinn also disassembled a stepper motor and showed us how it works: Left: The stepper rotor, with its many iron teeth visible. Right: The step signal that turns the rotor (two of the phases are connected to the oscilloscope). .md-content__button { display: none; }","title":"Our instructors"},{"location":"assignments/week11.html","text":"Input Devices IR sensor board I used the IR phototransistor from the fab library twice, to represent both the IR emitter and the phototransistor. 
For this board I'm using obsolete parts from the 2012 Fab Lab inventory that still work really well; the OP280KT IR emitter and the matched OP580 phototransistor. They are have a more square shape than their modern counterparts, but I checked the datasheets and their footprints are close enough to what I have in the KiCAD library now. My Xiao IR sensor schematic. I looked at the phototransistor board on the Xiao page of the Fab Academy web site of Adrian Torres. It has a 1kOhm resistor on the IR emitter diode and a 10kOhm resistor on the IR sensor. I did the same here. My Xiao PCB design for the IR emitter and sensor. Instead of soldering the Xiao onto the board I'm using pin sockets. The milling went well using Fab Modules and the Roland Modela MDX-20, but the edges of the traces are a little bit rough. That's a sign of wear on the 1/64 inch bit. Milling the traces of the IR sensor board. Then I milled the board outline with the 1/32 inch bit. Milling the outline of the IR sensor board. Here's how the board looks: The IR sensor board. With the Xiao SAMD21 on board. Here's a video of the sensor readings, it works really well: Measuring the analog signal I connected the signal from the IR sensor to the oscilloscope and got a reading that changed with the light hitting the sensor: Design files I used code from Adrian Torres to get readings in the serial plotter, but I changed the number of the analog read pin to A10. Here's my KiCAD project: Download Xiao IR sensor board And the PCB milling files for Fab Modules or Mods: The traces. The interior (for milling the board outline). And the Arduino code: Download IR sensor Arduino code Magnetic encoder The board that I made for output devices has three pins that are intended for encoder input. I connected the encoder and wrote a simple test program in the Arduino IDE. The AS5048 encoder has a three-pin PWM output and a five-pin SPI output. I used the simpler PWM output. 
It was easier to route the board for those connections. In this case, I use the pulseIn Arduino command to read the length of the pulses coming into the pin from the encoder. Then I print the value to the serial monitor. Here's the whole program: const int encoder = 3 ; // AMS AS5048 encoder int pos = 0 ; //mechanical position of shaft void setup () { Serial . begin ( 9600 ); pinMode ( encoder , INPUT ); } void loop () { pos = pulseIn ( encoder , HIGH ); //read encoder pulse Serial . print ( pos ); Serial . print ( '\\n' ); delay ( 10 ); } And here's a video of it running: Then I connected the current sensing resistor of each H-bridge to a pin on the Xiao RP2040. Using analogRead, I was able to measure the current going through the motor! There is one value for each H-bridge, and the values are similar. The values fluctuate, so they depend on when the measurement is made. It seems to work! Measuring the digital signal from the encoder I measured the SPI signals coming out of the AS5048 magnetic encoder. The first thing to measure is the clock signal. This is what synchronizes the SPI communication. The clock signal from the A5048 magnetic angle sensor was weak. I thought that maybe the magnetic encoder chip wasn't supposed to generate a clock signal. In SPI communication, there's only one main IC, and it generates the clock signal. So I connected the encoder up to a SAMD21 chip (and also connected the motor driver). With the microcontroller connected, the clock signal is strong. But its shape is surprising. I thought it would just be a regular square wave with no gaps. Next up is the chip select pin. The chip select pin is used to select between secondary SPI nodes, if more than one are connected to the same main microcontroller. This is how the chip select signal looks: The chip select signal in blue, overlaid onto the clock signal in yellow. Then there's the data, MOSI and MISO. The MOSI signal. I tried rotating the motor while it was on the screen. 
I didn't see any significant changes. But the angle still appeared in the serial monitor. The MISO signal. Some disturbances happened when I turned the motor, but I couldn't see how the signal was coming across the wire. .md-content__button { display: none; }","title":"11. Input Devices"},{"location":"assignments/week11.html#input-devices","text":"","title":"Input Devices   "},{"location":"assignments/week11.html#ir-sensor-board","text":"I used the IR phototransistor from the fab library twice, to represent both the IR emitter and the phototransistor. For this board I'm using obsolete parts from the 2012 Fab Lab inventory that still work really well; the OP280KT IR emitter and the matched OP580 phototransistor. They are have a more square shape than their modern counterparts, but I checked the datasheets and their footprints are close enough to what I have in the KiCAD library now. My Xiao IR sensor schematic. I looked at the phototransistor board on the Xiao page of the Fab Academy web site of Adrian Torres. It has a 1kOhm resistor on the IR emitter diode and a 10kOhm resistor on the IR sensor. I did the same here. My Xiao PCB design for the IR emitter and sensor. Instead of soldering the Xiao onto the board I'm using pin sockets. The milling went well using Fab Modules and the Roland Modela MDX-20, but the edges of the traces are a little bit rough. That's a sign of wear on the 1/64 inch bit. Milling the traces of the IR sensor board. Then I milled the board outline with the 1/32 inch bit. Milling the outline of the IR sensor board. Here's how the board looks: The IR sensor board. With the Xiao SAMD21 on board. 
Here's a video of the sensor readings, it works really well:","title":"IR sensor board"},{"location":"assignments/week11.html#measuring-the-analog-signal","text":"I connected the signal from the IR sensor to the oscilloscope and got a reading that changed with the light hitting the sensor:","title":"Measuring the analog signal"},{"location":"assignments/week11.html#design-files","text":"I used code from Adrian Torres to get readings in the serial plotter, but I changed the number of the analog read pin to A10. Here's my KiCAD project: Download Xiao IR sensor board And the PCB milling files for Fab Modules or Mods: The traces. The interior (for milling the board outline). And the Arduino code: Download IR sensor Arduino code","title":"Design files"},{"location":"assignments/week11.html#magnetic-encoder","text":"The board that I made for output devices has three pins that are intended for encoder input. I connected the encoder and wrote a simple test program in the Arduino IDE. The AS5048 encoder has a three-pin PWM output and a five-pin SPI output. I used the simpler PWM output. It was easier to route the board for those connections. In this case, I use the pulseIn Arduino command to read the length of the pulses coming into the pin from the encoder. Then I print the value to the serial monitor. Here's the whole program: const int encoder = 3 ; // AMS AS5048 encoder int pos = 0 ; //mechanical position of shaft void setup () { Serial . begin ( 9600 ); pinMode ( encoder , INPUT ); } void loop () { pos = pulseIn ( encoder , HIGH ); //read encoder pulse Serial . print ( pos ); Serial . print ( '\\n' ); delay ( 10 ); } And here's a video of it running: Then I connected the current sensing resistor of each H-bridge to a pin on the Xiao RP2040. Using analogRead, I was able to measure the current going through the motor! There is one value for each H-bridge, and the values are similar. The values fluctuate, so they depend on when the measurement is made. 
It seems to work!","title":"Magnetic encoder"},{"location":"assignments/week11.html#measuring-the-digital-signal-from-the-encoder","text":"I measured the SPI signals coming out of the AS5048 magnetic encoder. The first thing to measure is the clock signal. This is what synchronizes the SPI communication. The clock signal from the A5048 magnetic angle sensor was weak. I thought that maybe the magnetic encoder chip wasn't supposed to generate a clock signal. In SPI communication, there's only one main IC, and it generates the clock signal. So I connected the encoder up to a SAMD21 chip (and also connected the motor driver). With the microcontroller connected, the clock signal is strong. But its shape is surprising. I thought it would just be a regular square wave with no gaps. Next up is the chip select pin. The chip select pin is used to select between secondary SPI nodes, if more than one are connected to the same main microcontroller. This is how the chip select signal looks: The chip select signal in blue, overlaid onto the clock signal in yellow. Then there's the data, MOSI and MISO. The MOSI signal. I tried rotating the motor while it was on the screen. I didn't see any significant changes. But the angle still appeared in the serial monitor. The MISO signal. Some disturbances happened when I turned the motor, but I couldn't see how the signal was coming across the wire. .md-content__button { display: none; }","title":"Measuring the digital signal from the encoder"},{"location":"assignments/week12.html","text":"Molding and Casting The Guerilla guide to CNC and resing casting I came across the Guerrilla guide to CNC and resin casting by Michal Zalewski some months ago and was fascinated by the depth of technical information and practical skills displayed in the guide and in his Omnibot Mk II build . Look at that tiny DIY planetary gearbox ! His adventures in CNC and robotics are inspiring. 
And others have used his methods successfully; just look at this hybrid walker robot ( video ).So I took his recommendations and tried to order the main materials that he uses for his resin casting projects: Medium density modeling board, Quantum Silicones Q262 molding silicone and Innovative Polymers IE-3075 polyurethane for casting parts with excellent material properties. The problem was that no one wanted to sell them to me: This seemed like a dead end. Then I remembered a service I had used once before to order something obscure from a US based company that didn't ship to Iceland: ShopUSA . You can have things shipped to their warehouse in Virginia, and they forward your package to any country in the world. They shipped my polyurethane resin by air, which was convenient, but the silicone that I needed to make molds went by sea, because that package was above a weight limit. I also found someone on eBay who was selling relatively small pieces of medium density modeling board for milling. This is the stuff that's in all the ads from Roland, it's dimensionally stable, easy to machine and leaves a nice surface finish, not unlike a metal mold that has been glass bead blasted: I like this stuff. It's like machinable wax, but with a filler, so you get a uniform, slightly textured finish. It takes well to sanding. Mold pattern 1 modeling and toolpath generation I decided to make a button for the Frankenstein MCU that my instructor \u00de\u00f3rarinn is working on with Francisco Sanchez Arroyo . You push the button and your Fab Lab goes online. You can see the Fab Labs appear on the spinning globe , with connections forming between all the labs. It's a simple design, mainly meant to test the 3D spiral toolpath in Fusion 360 and the surface finish that is achievable with the modeling board. First I embossed the text and tried a parallel 3D toolpath in Fusion 360: I couldn't get the detail I wanted with this approach. 
Next, I used a single line font for the text and used the Project command to mill it into the curved surface of the button with a 3D strategy: The single line font, coupled with the Project milling strategy, worked really well . Milling mold pattern 1 I used a simple fixturing method that my instructor \u00de\u00f3rarinn showed me. I used the zeroing plate on the Shopbot. The milling chips look really nice when running adaptive clearing. Roughing pass being completed. Finishing pass looking good. Molding mold pattern 1 Since my silicone hadn't arrived, I got the help of my father-in-law, who is a recently retired dentist. He uses Impregum Penta from 3M to take impressions of teeth. It comes with a handy machine which mixes the two parts in the right proportions and dispenses the silicone. Still knows all the moves. This dental silicone is really viscous, so it's hard to prevent bubbles from being trapped in it when dispensing into the mold. Also, I didn't add draft angles to the geometry, so it was hard to get the silicone out, and I damaged the mold pattern: I thought the mold was shallow enough to not need a draft angle, but it really does need it. Mold pattern 2 Comparison between the profile sketches of the first and second mold pattern. Section analysis of first and second mold pattern. The second mold pattern toolpath. On the left are the feeds and speeds for the text engraving. My father-in-law put some Vaseline on the edge of the second mold pattern. That worked well. With the addition of the draft angles, it was much easier to get the silicone mold out of the modeling board. I used a syringe to put the silicone in this second mold pattern, so that I could get into all the little engraved letters. That strategy worked, but I still got bubbles in the silicone. A bit of Vaseline as mold release. This is the only picture I have of the syringe, because I needed both hands to apply the silicone. The second mold came out well, except for a few bubbles. 
With the help of my father-in-law, I made another mold from this pattern, but it turned out the same. The right silicone arrives Then my silicone arrived from Hobby Silicone! I also got a vacuum pot that my friend P\u00e1ll Einarsson made. That's a great thing to have when molding and casting! But I was disappointed when I plugged it in. I got no vacuum at all. If anything, the needle on the meter went up. I tried pressing down on the lid to seal it, but no luck. Then I felt air blowing out from under the lid. Aha! DIY vacuum pot. When Sigur\u00f0ur, the electrical teacher at the school came to the lab I got him to take a look at the three-phase plug with me. He showed me how to open it and he suggested that I try swapping the brown and black wires. That worked! The vacuum pump now blows in the right direction and I can degas the silicone and polyurethane. I ordered QM-262, which is quite stiff with Shore 60A hardness, and also Soft 107, which only has a 7A Shore hardness. I tried the softer one. One part blue catalyst to ten parts silicone. Mixing the silicone and scraping the sides of the cup until the color is uniform with no streaks. I mixed one part catalyst (5g) with ten parts silicone (50g) as per the manufacturer's instructions, and tried to shear it (Stir only laterally, not up and down, so as not to get bubbles in the mix). When the color was even I put the mix into the vacuum pot and let it sit for five minutes. A lot of bubbles surfaced and popped. I then let air back in and put the vacuum back on twice for two minutes. Then I poured the light blue mixed silicone into the mold pattern (after spraying it with Ultralease from Hobby Silicone). I poured in a thin stream into the same place the whole time, really slowly. Then I put it into the vacuum pot. A surprising number of bubbles came up. After a whole work day, I checked on the remains of the mixed silicone in the cup. It was still flowing freely. That was discouraging. 
My wife A\u00f0albj\u00f6rg worked for OK Prosthetics making prosthetic legs and silicone liners and she said that they heated up the aluminum molds to accelerate the curing process. Any remains took days to cure. They used to put the silicone mixing nozzles into an oven to be able to get the remains out and use the nozzle again quickly. She said that it depends on the chemistry and especially the catalyst, but she said that my silicone might still cure. Degassing the mold after pouring. After a week of waiting and then going to the Fab Lab Bootcamp in Neskaupsta\u00f0ur for another week, I came back and removed the mold from the master pattern. This silicone mold is very flexible. I'm very happy with the results. There are absolutely no bubbles in the mold and the text was captured perfectly. But I may need to contact the silicone manufacturer, because the datasheet says that it should cure in 24 hours. That's a nice-looking mold with no bubbles. Trying to mix polyurethane I made an attempt to mix the IE-3075 polyurethane to cast it in the silicone mold. The bottles said that I should agitate them before mixing, because they may have settled in storage. I probably shook them way too hard, because when I put the 1:1 by volume mixture into the vacuum pot, it foamed and immediately started to set. I made a rigid foam. Those can be useful as core materials in composites, but that's not what I intended to make. Casting wax in the mold Just to cast something in the mold, I tried candle wax. So I took the mold home and heated up an old candle. Heating the candle wax over a water bath. When all the wax had turned liquid and transparent, I poured it carefully into the mold. I may have heated the wax too quickly. As it gets hotter, the wax continues to expand according to its coefficient of thermal expansion. Then I poured it into the mold and it starts to cool down and contract. 
The greater the difference between these two temperatures, the worse the warping and wrinkles get. Thermal contraction during cooling results in wrinkles on the surface. Then I removed the cast wax part from the silicone mold. A small hand immediately grabbed the cast wax part after I removed it from the mold. The resulting part is not nearly as smooth as the mold is. Apparently the thermal contractions were so bad that this side got wrinkled too. The surface is all wrinkled after thermal contraction. But the text came out well. My little helper played around with the mold for a second. This resulted in the mold being bent all the way backwards and developing cracks. He wasn't trying to damage it, he was just being very four years old. This is a lesson for me, the mold was too thin in the middle. I was trying to save material and I thought I could get away with it. \"There is a crack in everything. That's how the light gets in,\" sang Leonard Cohen . Safety Data Sheets The silicones and the polyurethane that I ordered came with Safety Data Sheets. Soft 107 Silicone Soft 107 is a very soft and flexible two-component, room temperature condensation cure silicone. It has a durometer of 6. I guess they named it before they got the results back from the official durometer test. The condensation cure (or tin curing) means that this silicone probably shrinks a bit when curing. But the great thing about this material is that it's food safe (indirect contact). Because it's so soft, it's also ideal to make pads for pad printing. I've wanted to try pad printing for years, now I have what I need to do it. The silicone is mixed with a blue catalyst in a 10:1 silicone to catalyst ratio by weight. It's recommended to mix the silicone in a clean plastic container with a volume 3-4 times the volume of the silicone being mixed. Then there's space for it to expand when you put it in a vacuum pot to draw out the bubbles. 
If you don't have access to a vacuum chamber, they recommend a high pour: You pour into the bottom corner of the mold the whole time, keeping the mixing cup high enough above it to form a thin, steady stream of silicone. The bubbles will be pushed out before entering the thin stream and then the silcone mass slowly levels itself in the mold. This is what I did, and I also used a vacuum pot. The Soft 107 silicone may be harmful in contact with skin. Always use disposable gloves when handling it. Ensure good ventilation while working with it. Make sure that it doesn't leak into the environment. You must make sure that it cures before disposing of any remains. It's good to have a shower, an eyewash station and a ventilation system. It would be good for the Fab Lab to get an eyewash station. Rinse with plenty of water if you get the uncured material on your skin, in your eyes or into your mouth. Quantum Silicones QM 262 Silicone QM 262 is a two-component, room temperature addition cure (platinum cure) silicon material. The platinum cure means that there is essentially no shrinkage while curing. This silicone has excellent mechanical properties and is Michal Zalewski's favorite silicone for precision mold making . It has a high durometer of 60 Shore A. You mix it 10:1 silicone to blue catalyst by weight. The mixing instructions are the same as for the Soft 107 silicone material. The safety instructions are also the same as for the Soft 107 silicone. Innovative Polymers IE-3075 polyurethane This very strong polyurethane is mixed 1:1 by volume from an isocyanate and a polyol: RAKU TOOL IE-3075 Isocyanate Rinse with plenty of water and call a poison center if you get the uncured material on yourself. It may cause respiratory irritation and damage to organs through prolonged or repeated exposure. The material is suspected of causing cancer. Use protective glasses, glove, clothing and in case of inadequate ventilation, wear respiratory protection. 
Heating may cause an explosion. Make sure to completely cure the material before discarding the container. RAKU TOOL IE-3075 Polyol The safety procedures are very similar to the isocyanate, except the polyol is less flammable. There is much less information in its Safety Data Sheet. Ultralease URE GP Urethane Parfilm Mold Release Ultralease URE GP is a solvent-less and virtually odorless mold release. It's heat stable to 315\u00b0C, so it should also be good for injection molding. It doesn't interfere with painting, coating or bonding. It should last for several molding cycles. It's recommended for use with polyurethanes, silicones and rubbers. Wear eye protection, long sleeves, chemical resistant gloves and an organic vapor respirator. Also ensure proper ventilation. The can may explode if heated and the contents may displace oxygen and cause rapid suffocation. Keep away from heat, sparks, flames and hot surfaces. Do not pierce or burn the pressurized container, even after use. And definitely don't spray it onto an open flame. Rinse with plenty of water if you get it on yourself. And because it's in an aerosol can, contact with the rapidly expanding gas may cause burns or frostbite, as the gas removes heat from your skin to fuel its expansion. .md-content__button { display: none; }","title":"12. Molding and Casting"},{"location":"assignments/week12.html#molding-and-casting","text":"","title":"Molding and Casting   "},{"location":"assignments/week12.html#the-guerilla-guide-to-cnc-and-resing-casting","text":"I came across the Guerrilla guide to CNC and resin casting by Michal Zalewski some months ago and was fascinated by the depth of technical information and practical skills displayed in the guide and in his Omnibot Mk II build . Look at that tiny DIY planetary gearbox ! His adventures in CNC and robotics are inspiring. 
And others have used his methods successfully; just look at this hybrid walker robot ( video ).So I took his recommendations and tried to order the main materials that he uses for his resin casting projects: Medium density modeling board, Quantum Silicones Q262 molding silicone and Innovative Polymers IE-3075 polyurethane for casting parts with excellent material properties. The problem was that no one wanted to sell them to me: This seemed like a dead end. Then I remembered a service I had used once before to order something obscure from a US based company that didn't ship to Iceland: ShopUSA . You can have things shipped to their warehouse in Virginia, and they forward your package to any country in the world. They shipped my polyurethane resin by air, which was convenient, but the silicone that I needed to make molds went by sea, because that package was above a weight limit. I also found someone on eBay who was selling relatively small pieces of medium density modeling board for milling. This is the stuff that's in all the ads from Roland, it's dimensionally stable, easy to machine and leaves a nice surface finish, not unlike a metal mold that has been glass bead blasted: I like this stuff. It's like machinable wax, but with a filler, so you get a uniform, slightly textured finish. It takes well to sanding.","title":"The Guerilla guide to CNC and resing casting"},{"location":"assignments/week12.html#mold-pattern-1-modeling-and-toolpath-generation","text":"I decided to make a button for the Frankenstein MCU that my instructor \u00de\u00f3rarinn is working on with Francisco Sanchez Arroyo . You push the button and your Fab Lab goes online. You can see the Fab Labs appear on the spinning globe , with connections forming between all the labs. It's a simple design, mainly meant to test the 3D spiral toolpath in Fusion 360 and the surface finish that is achievable with the modeling board. 
First I embossed the text and tried a parallel 3D toolpath in Fusion 360: I couldn't get the detail I wanted with this approach. Next, I used a single line font for the text and used the Project command to mill it into the curved surface of the button with a 3D strategy: The single line font, coupled with the Project milling strategy, worked really well .","title":"Mold pattern 1 modeling and toolpath generation"},{"location":"assignments/week12.html#milling-mold-pattern-1","text":"I used a simple fixturing method that my instructor \u00de\u00f3rarinn showed me. I used the zeroing plate on the Shopbot. The milling chips look really nice when running adaptive clearing. Roughing pass being completed. Finishing pass looking good.","title":"Milling mold pattern 1"},{"location":"assignments/week12.html#molding-mold-pattern-1","text":"Since my silicone hadn't arrived, I got the help of my father-in-law, who is a recently retired dentist. He uses Impregum Penta from 3M to take impressions of teeth. It comes with a handy machine which mixes the two parts in the right proportions and dispenses the silicone. Still knows all the moves. This dental silicone is really viscous, so it's hard to prevent bubbles from being trapped in it when dispensing into the mold. Also, I didn't add draft angles to the geometry, so it was hard to get the silicone out, and I damaged the mold pattern: I thought the mold was shallow enough to not need a draft angle, but it really does need it.","title":"Molding mold pattern 1"},{"location":"assignments/week12.html#mold-pattern-2","text":"Comparison between the profile sketches of the first and second mold pattern. Section analysis of first and second mold pattern. The second mold pattern toolpath. On the left are the feeds and speeds for the text engraving. My father-in-law put some Vaseline on the edge of the second mold pattern. That worked well. 
With the addition of the draft angles, it was much easier to get the silicone mold out of the modeling board. I used a syringe to put the silicone in this second mold pattern, so that I could get into all the little engraved letters. That strategy worked, but I still got bubbles in the silicone. A bit of Vaseline as mold release. This is the only picture I have of the syringe, because I needed both hands to apply the silicone. The second mold came out well, except for a few bubbles. With the help of my father-in-law, I made another mold from this pattern, but it turned out the same.","title":"Mold pattern 2"},{"location":"assignments/week12.html#the-right-silicone-arrives","text":"Then my silicone arrived from Hobby Silicone! I also got a vacuum pot that my friend P\u00e1ll Einarsson made. That's a great thing to have when molding and casting! But I was disappointed when I plugged it in. I got no vacuum at all. If anything, the needle on the meter went up. I tried pressing down on the lid to seal it, but no luck. Then I felt air blowing out from under the lid. Aha! DIY vacuum pot. When Sigur\u00f0ur, the electrical teacher at the school came to the lab I got him to take a look at the three-phase plug with me. He showed me how to open it and he suggested that I try swapping the brown and black wires. That worked! The vacuum pump now blows in the right direction and I can degas the silicone and polyurethane. I ordered QM-262, which is quite stiff with Shore 60A hardness, and also Soft 107, which only has a 7A Shore hardness. I tried the softer one. One part blue catalyst to ten parts silicone. Mixing the silicone and scraping the sides of the cup until the color is uniform with no streaks. I mixed one part catalyst (5g) with ten parts silicone (50g) as per the manufacturer's instructions, and tried to shear it (Stir only laterally, not up and down, so as not to get bubbles in the mix). 
When the color was even I put the mix into the vacuum pot and let it sit for five minutes. A lot of bubbles surfaced and popped. I then let air back in and put the vacuum back on twice for two minutes. Then I poured the light blue mixed silicone into the mold pattern (after spraying it with Ultralease from Hobby Silicone). I poured in a thin stream into the same place the whole time, really slowly. Then I put it into the vacuum pot. A surprising number of bubbles came up. After a whole work day, I checked on the remains of the mixed silicone in the cup. It was still flowing freely. That was discouraging. My wife A\u00f0albj\u00f6rg worked for OK Prosthetics making prosthetic legs and silicone liners and she said that they heated up the aluminum molds to accelerate the curing process. Any remains took days to cure. They used to put the silicone mixing nozzles into an oven to be able to get the remains out and use the nozzle again quickly. She said that it depends on the chemistry and especially the catalyst, but she said that my silicone might still cure. Degassing the mold after pouring. After a week of waiting and then going to the Fab Lab Bootcamp in Neskaupsta\u00f0ur for another week, I came back and removed the mold from the master pattern. This silicone mold is very flexible. I'm very happy with the results. There are absolutely no bubbles in the mold and the text was captured perfectly. But I may need to contact the silicone manufacturer, because the datasheet says that it should cure in 24 hours. That's a nice-looking mold with no bubbles.","title":"The right silicone arrives"},{"location":"assignments/week12.html#trying-to-mix-polyurethane","text":"I made an attempt to mix the IE-3075 polyurethane to cast it in the silicone mold. The bottles said that I should agitate them before mixing, because they may have settled in storage. 
I probably shook them way too hard, because when I put the 1:1 by volume mixture into the vacuum pot, it foamed and immediately started to set. I made a rigid foam. Those can be useful as core materials in composites, but that's not what I intended to make.","title":"Trying to mix polyurethane"},{"location":"assignments/week12.html#casting-wax-in-the-mold","text":"Just to cast something in the mold, I tried candle wax. So I took the mold home and heated up an old candle. Heating the candle wax over a water bath. When all the wax had turned liquid and transparent, I poured it carefully into the mold. I may have heated the wax too quickly. As it gets hotter, the wax continues to expand according to its coefficient of thermal expansion. Then I poured it into the mold and it starts to cool down and contract. The greater the difference between these two temperatures, the worse the warping and wrinkles get. Thermal contraction during cooling results in wrinkles on the surface. Then I removed the cast wax part from the silicone mold. A small hand immediately grabbed the cast wax part after I removed it from the mold. The resulting part is not nearly as smooth as the mold is. Apparently the thermal contractions were so bad that this side got wrinkled too. The surface is all wrinkled after thermal contraction. But the text came out well. My little helper played around with the mold for a second. This resulted in the mold being bent all the way backwards and developing cracks. He wasn't trying to damage it, he was just being very four years old. This is a lesson for me, the mold was too thin in the middle. I was trying to save material and I thought I could get away with it. \"There is a crack in everything. 
That's how the light gets in,\" sang Leonard Cohen .","title":"Casting wax in the mold"},{"location":"assignments/week12.html#safety-data-sheets","text":"The silicones and the polyurethane that I ordered came with Safety Data Sheets.","title":"Safety Data Sheets"},{"location":"assignments/week12.html#soft-107-silicone","text":"Soft 107 is a very soft and flexible two-component, room temperature condensation cure silicone. It has a durometer of 6. I guess they named it before they got the results back from the official durometer test. The condensation cure (or tin curing) means that this silicone probably shrinks a bit when curing. But the great thing about this material is that it's food safe (indirect contact). Because it's so soft, it's also ideal to make pads for pad printing. I've wanted to try pad printing for years, now I have what I need to do it. The silicone is mixed with a blue catalyst in a 10:1 silicone to catalyst ratio by weight. It's recommended to mix the silicone in a clean plastic container with a volume 3-4 times the volume of the silicone being mixed. Then there's space for it to expand when you put it in a vacuum pot to draw out the bubbles. If you don't have access to a vacuum chamber, they recommend a high pour: You pour into the bottom corner of the mold the whole time, keeping the mixing cup high enough above it to form a thin, steady stream of silicone. The bubbles will be pushed out before entering the thin stream and then the silcone mass slowly levels itself in the mold. This is what I did, and I also used a vacuum pot. The Soft 107 silicone may be harmful in contact with skin. Always use disposable gloves when handling it. Ensure good ventilation while working with it. Make sure that it doesn't leak into the environment. You must make sure that it cures before disposing of any remains. It's good to have a shower, an eyewash station and a ventilation system. It would be good for the Fab Lab to get an eyewash station. 
Rinse with plenty of water if you get the uncured material on your skin, in your eyes or into your mouth.","title":"Soft 107 Silicone"},{"location":"assignments/week12.html#quantum-silicones-qm-262-silicone","text":"QM 262 is a two-component, room temperature addition cure (platinum cure) silicon material. The platinum cure means that there is essentially no shrinkage while curing. This silicone has excellent mechanical properties and is Michal Zalewski's favorite silicone for precision mold making . It has a high durometer of 60 Shore A. You mix it 10:1 silicone to blue catalyst by weight. The mixing instructions are the same as for the Soft 107 silicone material. The safety instructions are also the same as for the Soft 107 silicone.","title":"Quantum Silicones QM 262 Silicone"},{"location":"assignments/week12.html#innovative-polymers-ie-3075-polyurethane","text":"This very strong polyurethane is mixed 1:1 by volume from an isocyanate and a polyol:","title":"Innovative Polymers IE-3075 polyurethane"},{"location":"assignments/week12.html#raku-tool-ie-3075-isocyanate","text":"Rinse with plenty of water and call a poison center if you get the uncured material on yourself. It may cause respiratory irritation and damage to organs through prolonged or repeated exposure. The material is suspected of causing cancer. Use protective glasses, glove, clothing and in case of inadequate ventilation, wear respiratory protection. Heating may cause an explosion. Make sure to completely cure the material before discarding the container.","title":"RAKU TOOL IE-3075 Isocyanate"},{"location":"assignments/week12.html#raku-tool-ie-3075-polyol","text":"The safety procedures are very similar to the isocyanate, except the polyol is less flammable. 
There is much less information in its Safety Data Sheet.","title":"RAKU TOOL IE-3075 Polyol"},{"location":"assignments/week12.html#ultralease-ure-gp-urethane-parfilm-mold-release","text":"Ultralease URE GP is a solvent-less and virtually odorless mold release. It's heat stable to 315\u00b0C, so it should also be good for injection molding. It doesn't interfere with painting, coating or bonding. It should last for several molding cycles. It's recommended for use with polyurethanes, silicones and rubbers. Wear eye protection, long sleeves, chemical resistant gloves and an organic vapor respirator. Also ensure proper ventilation. The can may explode if heated and the contents may displace oxygen and cause rapid suffocation. Keep away from heat, sparks, flames and hot surfaces. Do not pierce or burn the pressurized container, even after use. And definitely don't spray it onto an open flame. Rinse with plenty of water if you get it on yourself. And because it's in an aerosol can, contact with the rapidly expanding gas may cause burns or frostbite, as the gas removes heat from your skin to fuel its expansion. .md-content__button { display: none; }","title":"Ultralease URE GP Urethane Parfilm Mold Release"},{"location":"assignments/week13.html","text":"Networking and Communications Hello I2C I did this week's work in Neskaupsta\u00f0ur, which is as far as you can get from \u00cdsafj\u00f6r\u00f0ur in Iceland. I had to take two flights to get there! The annual Icelandic Fab Lab Bootcamp was held in Neskaupsta\u00f0ur this year. I think everybody got lots out of it and this was the first time that we set up a repo and a web site for an Icelandic bootcamp. We're under the influence of the 2023 Instructor's Bootcamp in Amsterdam. This is what I packed for the trip (plus a few items of clothing and a toothbrush): I brought a large part of our electronics inventory, just in case. I decided to do as Hafey did, and make Adrian's Hello I2C boards. 
The project consists of a master module which sends an I2C message and two nodes that receive the message and turn on an LED . I milled Adrian's boards and populated them with components. Hafey brought ATtiny412 ICs for me from Fab Lab Reykjav\u00edk. I2C node with adapter. For my own board I decided to use a laser diode. I had wanted to try one of those since I saw \u00c1rni Bj\u00f6rnsson's output devices video . My laser diode. My I2C laser diode PCB design. I tried the Gerber output for the first time. I tried FlatCAM, which went well, but the toolpath left thin strips of copper in between the traces. I would need to adjust the settings before milling again. It also didn't mill the text on the board, but I didn't really care about that. My laser diode I2C board after milling. In this milling machine, the FR1 PCB blanks are fastened to the wasteboard with double-sided tape. Here's my I2C laser board populated and connected to the adapter. Hafey pointed out to me that Adrian's Arduino code had an error. The master node was set up to send a message to one node, and then another. But the number of the node was the same in both cases: the command Wire.write(1) was in both places. After changing the second one to Wire.write(2) , the code worked. My board didn't work. I was able to program the IC, but the LED didn't turn on. Download I2C laser KiCAD project Download I2C master code Download I2C node 1 code Download I2C node2 code Fab Modules When discussing Fab Modules/Mods/Mods Project, few of our colleagues thought it was a little bit strange to turn vector drawings into bitmaps before running them through the CAM software. But Frosti told me that Neil had mentioned at some point that CNC machines operate on a bitmap grid in the end. That's a good point. We're using Computer Numerical Control, not Computer Vector Control. The Gcodes are all encoded as Cartesian coordinates with finite precision. 
So if you use a high enough resolution in the PNG image, you shouldn't lose any accuracy. The workflow works well, and I like using the old Fab Modules that we have running locally on a Linux laptop. Late in the evening I wanted to make the UPDI adapter that I needed to connect my Hello I2C boards to the computer. Frosti took the opportunity to open up Mods Project . After tweaking a few settings and figuring out that we needed to turn on the Save module and then click Calculate again to get an RML export, the milling went great! We just needed to set the file type to RML1 in the Roland control software, instead of RML NC code. microSD breakout board This video might be useful. It shows how to solder wires directly to an SD card to communicate with it. So the passive components on commercial SD breakout modules aren't strictly necessary, although I'm sure they make the communications more reliable. The PCB layout of my microSD card breakout board on the left and Janet Liu's schematic on the right. Her schematic shows which SD card pin matches which SPI pin on this particular SD card holder from Amphenol . You need to open the image in a new tab to see the pin names on my PCB layout. I didn't consider which way you put the SD card into the slot, so now that I've soldered the headers onto the board, the microSD card is stuck there for all eternity. I hope I can at least communicate with it. Download SD card breakout board KiCAD project .md-content__button { display: none; }","title":"13. Networking and Communications"},{"location":"assignments/week13.html#networking-and-communications","text":"","title":"Networking and Communications   "},{"location":"assignments/week13.html#hello-i2c","text":"I did this week's work in Neskaupsta\u00f0ur, which is as far as you can get from \u00cdsafj\u00f6r\u00f0ur in Iceland. I had to take two flights to get there! The annual Icelandic Fab Lab Bootcamp was held in Neskaupsta\u00f0ur this year. 
I think everybody got lots out of it and this was the first time that we set up a repo and a web site for an Icelandic bootcamp. We're under the influence of the 2023 Instructor's Bootcamp in Amsterdam. This is what I packed for the trip (plus a few items of clothing and a toothbrush): I brought a large part of our electronics inventory, just in case. I decided to do as Hafey did, and make Adrian's Hello I2C boards. The project consists of a master module which sends an I2C message and two nodes that receive the message and turn on an LED . I milled Adrian's boards and populated them with components. Hafey brought ATtiny412 ICs for me from Fab Lab Reykjav\u00edk. I2C node with adapter. For my own board I decided to use a laser diode. I had wanted to try one of those since I saw \u00c1rni Bj\u00f6rnsson's output devices video . My laser diode. My I2C laser diode PCB design. I tried the Gerber output for the first time. I tried FlatCAM, which went well, but the toolpath left thin strips of copper in between the traces. I would need to adjust the settings before milling again. It also didn't mill the text on the board, but I didn't really care about that. My laser diode I2C board after milling. In this milling machine, the FR1 PCB blanks are fastened to the wasteboard with double-sided tape. Here's my I2C laser board populated and connected to the adapter. Hafey pointed out to me that Adrian's Arduino code had an error. The master node was set up to send a message to one node, and then another. But the number of the node was the same in both cases: the command Wire.write(1) was in both places. After changing the second one to Wire.write(2) , the code worked. My board didn't work. I was able to program the IC, but the LED didn't turn on. 
Download I2C laser KiCAD project Download I2C master code Download I2C node 1 code Download I2C node2 code","title":"Hello I2C"},{"location":"assignments/week13.html#fab-modules","text":"When discussing Fab Modules/Mods/Mods Project, few of our colleagues thought it was a little bit strange to turn vector drawings into bitmaps before running them through the CAM software. But Frosti told me that Neil had mentioned at some point that CNC machines operate on a bitmap grid in the end. That's a good point. We're using Computer Numerical Control, not Computer Vector Control. The Gcodes are all encoded as Cartesian coordinates with finite precision. So if you use a high enough resolution in the PNG image, you shouldn't lose any accuracy. The workflow works well, and I like using the old Fab Modules that we have running locally on a Linux laptop. Late in the evening I wanted to make the UPDI adapter that I needed to connect my Hello I2C boards to the computer. Frosti took the opportunity to open up Mods Project . After tweaking a few settings and figuring out that we needed to turn on the Save module and then click Calculate again to get an RML export, the milling went great! We just needed to set the file type to RML1 in the Roland control software, instead of RML NC code.","title":"Fab Modules"},{"location":"assignments/week13.html#microsd-breakout-board","text":"This video might be useful. It shows how to solder wires directly to an SD card to communicate with it. So the passive components on commercial SD breakout modules aren't strictly necessary, although I'm sure they make the communications more reliable. The PCB layout of my microSD card breakout board on the left and Janet Liu's schematic on the right. Her schematic shows which SD card pin matches which SPI pin on this particular SD card holder from Amphenol . You need to open the image in a new tab to see the pin names on my PCB layout. 
I didn't consider which way you put the SD card into the slot, so now that I've soldered the headers onto the board, the microSD card is stuck there for all eternity. I hope I can at least communicate with it. Download SD card breakout board KiCAD project .md-content__button { display: none; }","title":"microSD breakout board"},{"location":"assignments/week14.html","text":"Interface and Application Programming Frankenstein MCU Presenting my addition to the FMCU to Neil Gershenfeld and Fab Academy Class 2023. Link to the video. Since my instructor \u00de\u00f3rarinn is working with Fran Sanchez on a IoT button that connects Fab Labs together, I decided to clone the repo and take a look at it. The current MCU uses Zoom to connect the Fab Labs together with live video feeds. So I searched for a way to embed Zoom into your own application and found the Zoom Meeting SDK for web , and in particular this JavaScript example . I cloned the repo, got a secret key as a Zoom Developer and I've got the authentication server running locally on Node.js, but the readme says that I need to send a POST request to the server. HTTP POST request trouble on the Zoom authentication server. Google didn't give me any understandable instructions on how to make a POST request to a localhost Node.js server. So I turned to ChatGPT. This is the first time I ask it to help me solve a problem. And help it did! ChatGPT to the rescue! Making an HTTP POST request using Postman. At the bottom you can see the signature that the Zoom authentication server returns. Thanks ChatGPT! The example works on its own. For this example to work you need to get a special authentication repo, which runs a node js server that listens. You go to Zoom Marketplace and get developer credentials and put them into the code in this repo and run the node server. 
Then when you press Join Meeting on the FMCU website, the website sends a message to the authentication server and gets a passkey, and then logs you into Zoom automatically. Now it's in a sidebar! I added the Zoom Meeting SDK JavaScript code, but the button doesn't work. I took to Mattermost and showed the non-working \"Join Meeting\" button to \u00de\u00f3rarinn and Fran. Fran replied with this: So I looked at the two CSS files in the FMCU repo and saw two instances of a hyperlink having the .repo CSS class. So I gave the Zoom code the .repo CSS class and ran the Node server: It works! Wow, I didn't really expect that. I can even move the window around. Now I need to connect this to a physical circuit. I'm looking into how the FMCU button uses MQTT to send a message to the Node server. I'm also looking for a way to do serial communication using Node. Here's the code with instructions on how to run it locally: Link to the FMCU-Zoom repo Visualizing a light sensor I experimented with creating a computer interface for the light sensor that I made in Input Devices week . The code that runs on the Xiao is very simple. It comes from Adri\u00e1n Torres. The code uses analogRead to read the value from the IR sensor and then writes it to the serial port: Download IR sensor Arduino code I used a few different Python scripts to receive the IR light values from the serial port. First I tried Neil's hello.light.45.py code but it reads single characters from the serial port and I had programmed the Xiao to send whole lines at a time. I stopped there with Neil's code. Then I found a nice tutorial showing how you can list the available COM devices in the terminal, pick one by typing its number, and open it using serial.tools. Once the serial port is open, I run an infinite while loop and read one line from the serial port at a time, decode it using UTF-8 character encoding and then turn that string into an integer. 
I do the reading and converting in only two lines of Python code: packet = serialInst . readline () y = int ( packet . decode ( 'utf' )) Then, to get a very rudimentary graphical representation going, I use an if statement and display one - if the value is between 0 and 100, display -- if the value is between 100 and 200 and so on, up to 1000 ( ---------- ). As simple as it gets. This barely counts as a graphical user interface. Download Python terminal visualization code I also tried to make a GUI using Tkinter. I found a useful code snippet in example 1 in this tutorial , which creates a small GUI window and displays a title and a red and green rectangle with empty space between them. It's static, but by using my y variable (the number that is streaming into the serial port) instead of hardcoded numbers, I can make the bar move. The static GUI example. I could get the Tkinter interface to run separately and I could also get a stream of data from the IR sensor separately, but I had trouble combining them. Apparently, the reason is that I have two infinite while loops and the one that comes first in the code blocks the other. While the code waits for input from the serial port, nothing else can happen. And while the interface is running, nothing else can happen. I couldn't figure this out using the examples that I found online. The following day I gave up and asked ChatGPT to change the code to make the two loops run concurrently. That resulted in code that ran, but I needed to make some changes to it. Only the grey bar was changing size between 0 and 1000 pixels, so I put 1000-y as the width of the black bar. That worked nicely. The interface was also sluggish, but I fixed that by changing root.after(100, readFromSerial) to root.after(10, readFromSerial) . Then there is a much shorter delay for updating the interface. We have a GUI that runs smoothly. 
Download Python GUI code Some random thoughts about my robot arm final project I've looked into many ways of making an interface for a robot arm. Highlights include: Python Tkinter ROS Urumbu Modular Things Processing P5.js Python script in Blender Python script in Fusion 360 Python script in FreeCAD FreeCAD robot workbench RoboDK Grasshopper WebSerial PyScript Webassembly ThreeJS OpenCV NodeJS Svelte Threlte The Pimoroni Pi Pico library Phew! Streamlit NiceGUI WebSockets WebTransport CodeMirror Chilipepr Crossbar.io libreconnect I like this minimal Pi Pico web server that was shared on Doctor Monk's DIY Electronics Blog. This captive portal guide also looks interesting. It uses the Pimoroni Phew! web server library, which is super simple and was written specifically for the Raspberry Pi Pico. I know that ROS is probably the way to go, but it's huge and I don't know where to start. Maybe here , since he asks at the start of the video if you want to build a robot that works with ROS but don't know where to start. .md-content__button { display: none; }","title":"14. Interface and Application Programming"},{"location":"assignments/week14.html#interface-and-application-programming","text":"","title":"Interface and Application Programming   "},{"location":"assignments/week14.html#frankenstein-mcu","text":"Presenting my addition to the FMCU to Neil Gershenfeld and Fab Academy Class 2023. Link to the video. Since my instructor \u00de\u00f3rarinn is working with Fran Sanchez on a IoT button that connects Fab Labs together, I decided to clone the repo and take a look at it. The current MCU uses Zoom to connect the Fab Labs together with live video feeds. So I searched for a way to embed Zoom into your own application and found the Zoom Meeting SDK for web , and in particular this JavaScript example . 
I cloned the repo, got a secret key as a Zoom Developer and I've got the authentication server running locally on Node.js, but the readme says that I need to send a POST request to the server. HTTP POST request trouble on the Zoom authentication server. Google didn't give me any understandable instructions on how to make a POST request to a localhost Node.js server. So I turned to ChatGPT. This is the first time I ask it to help me solve a problem. And help it did! ChatGPT to the rescue! Making an HTTP POST request using Postman. At the bottom you can see the signature that the Zoom authentication server returns. Thanks ChatGPT! The example works on its own. For this example to work you need to get a special authentication repo, which runs a node js server that listens. You go to Zoom Marketplace and get developer credentials and put them into the code in this repo and run the node server. Then when you press Join Meeting on the FMCU website, the website sends a message to the authentication server and gets a passkey, and then logs you into Zoom automatically. Now it's in a sidebar! I added the Zoom Meeting SDK JavaScript code, but the button doesn't work. I took to Mattermost and showed the non-working \"Join Meeting\" button to \u00de\u00f3rarinn and Fran. Fran replied with this: So I looked at the two CSS files in the FMCU repo and saw two instances of a hyperlink having the .repo CSS class. So I gave the Zoom code the .repo CSS class and ran the Node server: It works! Wow, I didn't really expect that. I can even move the window around. Now I need to connect this to a physical circuit. I'm looking into how the FMCU button uses MQTT to send a message to the Node server. I'm also looking for a way to do serial communication using Node. 
Here's the code with instructions on how to run it locally: Link to the FMCU-Zoom repo","title":"Frankenstein MCU"},{"location":"assignments/week14.html#visualizing-a-light-sensor","text":"I experimented with creating a computer interface for the light sensor that I made in Input Devices week . The code that runs on the Xiao is very simple. It comes from Adri\u00e1n Torres. The code uses analogRead to read the value from the IR sensor and then writes it to the serial port: Download IR sensor Arduino code I used a few different Python scripts to receive the IR light values from the serial port. First I tried Neil's hello.light.45.py code but it reads single characters from the serial port and I had programmed the Xiao to send whole lines at a time. I stopped there with Neil's code. Then I found a nice tutorial showing how you can list the available COM devices in the terminal, pick one by typing its number, and open it using serial.tools. Once the serial port is open, I run an infinite while loop and read one line from the serial port at a time, decode it using UTF-8 character encoding and then turn that string into an integer. I do the reading and converting in only two lines of Python code: packet = serialInst . readline () y = int ( packet . decode ( 'utf' )) Then, to get a very rudimentary graphical representation going, I use an if statement and display one - if the value is between 0 and 100, display -- if the value is between 100 and 200 and so on, up to 1000 ( ---------- ). As simple as it gets. This barely counts as a graphical user interface. Download Python terminal visualization code I also tried to make a GUI using Tkinter. I found a useful code snippet in example 1 in this tutorial , which creates a small GUI window and displays a title and a red and green rectangle with empty space between them. It's static, but by using my y variable (the number that is streaming into the serial port) instead of hardcoded numbers, I can make the bar move. 
The static GUI example. I could get the Tkinter interface to run separately and I could also get a stream of data from the IR sensor separately, but I had trouble combining them. Apparently, the reason is that I have two infinite while loops and the one that comes first in the code blocks the other. While the code waits for input from the serial port, nothing else can happen. And while the interface is running, nothing else can happen. I couldn't figure this out using the examples that I found online. The following day I gave up and asked ChatGPT to change the code to make the two loops run concurrently. That resulted in code that ran, but I needed to make some changes to it. Only the grey bar was changing size between 0 and 1000 pixels, so I put 1000-y as the width of the black bar. That worked nicely. The interface was also sluggish, but I fixed that by changing root.after(100, readFromSerial) to root.after(10, readFromSerial) . Then there is a much shorter delay for updating the interface. We have a GUI that runs smoothly. Download Python GUI code","title":"Visualizing a light sensor"},{"location":"assignments/week14.html#some-random-thoughts-about-my-robot-arm-final-project","text":"I've looked into many ways of making an interface for a robot arm. Highlights include: Python Tkinter ROS Urumbu Modular Things Processing P5.js Python script in Blender Python script in Fusion 360 Python script in FreeCAD FreeCAD robot workbench RoboDK Grasshopper WebSerial PyScript Webassembly ThreeJS OpenCV NodeJS Svelte Threlte The Pimoroni Pi Pico library Phew! Streamlit NiceGUI WebSockets WebTransport CodeMirror Chilipepr Crossbar.io libreconnect I like this minimal Pi Pico web server that was shared on Doctor Monk's DIY Electronics Blog. This captive portal guide also looks interesting. It uses the Pimoroni Phew! web server library, which is super simple and was written specifically for the Raspberry Pi Pico. 
I know that ROS is probably the way to go, but it's huge and I don't know where to start. Maybe here , since he asks at the start of the video if you want to build a robot that works with ROS but don't know where to start. .md-content__button { display: none; }","title":"Some random thoughts about my robot arm final project"},{"location":"assignments/week15.html","text":"Wild Card Week Vacuum forming Design The annual Icelandic Fab Lab Bootcamp was held in Fab Lab Neskaupsta\u00f0ur in 2023. M\u00f3ses, the Fab Lab manager, showed me how to do vacuum forming with the Mayku FormBox. The Mayku Formbox is a basic 200x200mm vacuum forming machine. Its heating element goes up to 340\u00b0C You need to connect a vacuum cleaner to it for it to work. It works fine, but I think I would find it annoying to have to bring the vacuum cleaner every time I used the vacuum former. First I brought a rectangular 3D print with #FABLABISA inscribed on it. M\u00f3ses said that the object was rather small, the edges needed to be rounded and he also showed me examples of details similar to my text not coming through in the plastic sheet. Vacuum forming examples. Sharp corners and small details don't work well. The text J\u00d3L (Christmas) in the upper right corner doesn't come through at all. He also mentioned that circular things tend to work the best. That makes sense, the stretching will be uniform and there are no corners where creases can form in the plastic sheet. I decided to go with the Fab Lab logo instead. I rounded the chocolate button shape and the logo as much as the geometry would allow, and showed M\u00f3ses a finished print. I used a Mayku Clear Sheet , which is made of PETG and is food safe. The official Mayku sheets are not cheap. Now that's a better candidate for vacuum forming. This should work nicely, said M\u00f3ses, but there is a question whether the pocket in the middle will be rendered in the plastic sheet. 
I decided to try making holes in the middle to let the air out. I printed four versions of buttons with holes in the middle, for a total of five buttons: No holes 1.5 mm holes 2 mm holes 2 mm holes with small holes in the three smallest crevices 2 mm holes with rectangular holes in the three smallest crevices. Buttons with holes added to draw vacuum through the center of the part. 3D printing Completed 3D prints. Vacuum forming When the prints were complete, we connected a vacuum cleaner to the FormBox and heated it up. M\u00f3ses told me that the sheet needed to be heated up until it droops 2 or 3 cm down. Then it's soft enough to form. Sheet is hot and drooping and ready to form. Then the handle locks are released and the frame with the sheet is quickly lowered onto the bed containing the 3D printed positive forms. The vacuum is held for a minute or two and the sheet is allowed to cool down a bit. Then it's ready! Done! To my surprise, all the forms came out well. Apparently the tiny crevices in the logo were enough to get the air out of the middle part. The button with no holes actually came out the best! Better label both the buttons and the sheet to remember which is which. I tried measuring the depth of the pocket in the middle of each button with a caliper and they seemed to be the same, whether there were holes in the forms or not. Measuring the depth in the middle. I like vacuum forming but its applications are limited. It would be nice to host a short class on chocolate mold making here at the Fab Lab, but I don't know what else I would use the vacuum former for. Mostly it's used to make packaging, and I don't make any packaging. Chocolate casting I followed instructions from the Icelandic newspaper Morgunbla\u00f0i\u00f0, so they may not be useful to you. First melt the chocolate slowly in a water bath. Don't let any water get into the chocolate! You heat it up to 50\u00b0C. Melting the chocolate. 
Then you let the temperature drop to 42\u00b0C and you add a third of the same kind of chocolate (chopped) to the pot. Stir it into the melt. Chopping the rest of the chocolate. Putting the chopped chocolate into the pot. Then you let the temperature drop down to 32\u00b0C. Now the chocolate is tempered and you can pour it into the molds. Pouring into the molds. I also shook the molds to flatten out the bottoms of the buttons. And here they are, lined up with the 3D prints that created them. Row of chocolate Fab Lab buttons. They all came out well. I would skip the holes in the middle next time. Now it's time for testing. Is the chocolate tempered or not? I suspect that my next attempt may be more successful, since I won't have to take pictures and videos while I try to get the temperature profile right. Here's the Fusion model of the Fab Lab chocolate button: Download chocolate button CAD model Further experimentation It would be interesting to try printing the original Fab Lab button model with an increased distance between the 3D printed lines. I think that would make it possible to draw vacuum through the part without having to draw holes manually. And the top surface texture might be nicer, too. FPGA (Field-Programmable Gate Array) The Runber FPGA board. I think the program is supposed to blink the eight LEDs above the FPGA chip, but the tutorial isn't entirely clear about it. Why I've been curious about FPGAs for some time, for three reasons: I took the course Computer-Controlled Hardware in engineering school at the University of Iceland. One of the lecturers is an FPGA designer at the prosthetic company \u00d6ssur . He showed how the company's Power Knee has developed over the years from a large, complicated circuit board with lots of components to a much smaller footprint, essentially containing only a microcontroller and an FPGA. The FPGA can contain any digital circuitry, which is really interesting. 
My wife gave me the book Einstein's Shadow for Christmas a few years ago. It's an entertainingly written account of the quest to photograph a black hole. It involved extremely high-speed FPGA chips which made the Event Horizon Telescope possible. In the 2023 Fab Academy Student Bootcamp , Krisjanis Rijnieks mentioned that ordinary people now have the opportunity to design digital chips and have them made, using open source toolchains like Google Silicon . Google also offers the Open MPW program which is a lottery where you can submit your chip design and if you're lucky, Google will pay for the chip fabrication. I also discovered the Zero to ASIC course from Matt Venn, where he teaches people to design Application Specific Integrated Circuits from scratch and has them manufactured for them. Exciting stuff! Chips are designed using VHDL or Verilog, which are hardware description languages. You also use these languages to program FPGAs. Before committing a chip design to manufacturing, people usually test it on an FPGA. In Wild Card Week, I thought to myself: If I don't try an FPGA now, I probably never will. For this week, I ordered the cheapest FPGA board that I could find ($30) that still has a set of tutorials available, the Gowin RUNBER board from Seeed Studio. How An FPGA is a circuit that you design with a hardware description language. The unit of computation is one bit, which can be a 1 or a 0. The FPGA consists of lots of Look-Up Tables (LUTs), which specify what happens when ones or zeros enter them. These LUTs form logic gates which can be combined to make any circuit, even a whole ARM microcontroller! Indeed, many FPGAs are programmed to contain a microcontroller core as well as additional digital circuits. The Gowin GW1N-4 on my board has 4608 LUTs with four inputs each. I managed to get a license by sending an email to Gowin customer service and then I followed the first tutorial in this PDF . The circuit blinks eight LEDs, one at a time. 
In Gowin FPGA Designer, I created a new project: I selected FPGA Design Project: And then I selected my FPGA chip: I got a summary of the project settings before finishing the setup: Now I had a new FPGA design project. So far so good: I created a new Verilog file and pasted the code from the tutorial. Then I clicked Synthesize: That seemed to work OK. Then I went into Tools -> Floor Planner to see how the circuit looked inside the FPGA. I got an error. Then I looked a little further in the tutorial and found that I needed to create a constraints file, where I specify which pins I'm using and what they are. I went into File -> New, selected Physical Constraints File and gave it the name constr.cst: I just copied the code from the tutorial and pasted it into the constraints file. This is how it looks in the editor: The constraints file defines eight output pins that are supposed to turn eight LEDs on, one at a time. Now I could go into Tools -> Floor Planner. I got a graphical representation of the chip. I would expect eight pins to be highlighted in blue, but only six pins are highlighted. But I wouldn't know how to fix the code, and the messages in the terminal seemed positive: So I soldiered on. I selected the I/O Constraints at the bottom of the window and got a list of the constraints that were defined in the .cst file: The third and final file I needed to create before programming the FPGA was a timing constraints file. In FPGAs, everything is counted in clock cycles, and you need to define the clock frequency. I opened the Timing Constraints Editor: There I created a Timing Constraints file with a period of 83.333 nanoseconds and a frequency of 12 MHz: The timing constraints file only contains a single line of code specifying the clock speed: Then I selected Run Place and Route. That's the button with four colored squares that looks a bit like the Windows icon. 
I got a bunch of Generate file completed messages in the terminal, which seemed promising: If you look again at the image above, you can see the mouse hovering over the Programmer icon, which is a green downward arrow. I now pressed it and hoped for the best. I have no idea what I am doing. The programmer found a USB device, so I clicked Program/Configure. Here the tutorial ends, but I still needed to make some selections. I found the manual for the Programmer in the Gowin software directory and followed instructions that told me to select Embedded flash mode and select the .fs programming file in the dialog: When I clicked Save I got an error: This seems like a simple error, all I have to do is select the right chip. I tried selecting all the chips that have similar names to mine (GWIN-4, GWIN4B, GWIN-4D and all their versions). I had to select the chip both in the Floor Planner and in the Programmer. But nothing worked. I don't know where to go from here, so I'll stop. .md-content__button { display: none; }","title":"15. Wild Card Week"},{"location":"assignments/week15.html#wild-card-week","text":"","title":"Wild Card Week   "},{"location":"assignments/week15.html#vacuum-forming","text":"","title":"Vacuum forming"},{"location":"assignments/week15.html#design","text":"The annual Icelandic Fab Lab Bootcamp was held in Fab Lab Neskaupsta\u00f0ur in 2023. M\u00f3ses, the Fab Lab manager, showed me how to do vacuum forming with the Mayku FormBox. The Mayku Formbox is a basic 200x200mm vacuum forming machine. Its heating element goes up to 340\u00b0C You need to connect a vacuum cleaner to it for it to work. It works fine, but I think I would find it annoying to have to bring the vacuum cleaner every time I used the vacuum former. First I brought a rectangular 3D print with #FABLABISA inscribed on it. 
M\u00f3ses said that the object was rather small, the edges needed to be rounded and he also showed me examples of details similar to my text not coming through in the plastic sheet. Vacuum forming examples. Sharp corners and small details don't work well. The text J\u00d3L (Christmas) in the upper right corner doesn't come through at all. He also mentioned that circular things tend to work the best. That makes sense, the stretching will be uniform and there are no corners where creases can form in the plastic sheet. I decided to go with the Fab Lab logo instead. I rounded the chocolate button shape and the logo as much as the geometry would allow, and showed M\u00f3ses a finished print. I used a Mayku Clear Sheet , which is made of PETG and is food safe. The official Mayku sheets are not cheap. Now that's a better candidate for vacuum forming. This should work nicely, said M\u00f3ses, but there is a question whether the pocket in the middle will be rendered in the plastic sheet. I decided to try making holes in the middle to let the air out. printed four versions of buttons with holes in the middle, for a total of five buttons: No holes 1.5 mm holes 2 mm holes 2 mm holes with small holes in the three smallest crevices 2 mm holes with rectangular holes in the three smallest crevices. Buttons with holes added to draw vacuum through the center of the part.","title":"Design"},{"location":"assignments/week15.html#3d-printing","text":"Completed 3D prints.","title":"3D printing"},{"location":"assignments/week15.html#vacuum-forming_1","text":"When the prints were complete, we connected a vacuum cleaner to the FormBox and heated it up. M\u00f3ses told me that the sheet needed to be heated up until it droops 2 or 3 cm down. Then it's soft enough to form. Sheet is hot and drooping and ready to form. Then the handle locks are released and the frame with the sheet is quickly lowered onto the bed containing the 3D printed positive forms. 
The vacuum is held for a minute or two and the sheet is allowed to cool down a bit. Then it's ready! Done! To my surprise, all the forms came out well. Apparently the tiny crevices in the logo were enough to get the air out of the middle part. The button with no holes actually came out the best! Better label both the buttons and the sheet to remember which is which. I tried measuring the depth of the pocket in the middle of each button with a caliper and they seemed to be the same, whether there were holes in the forms or not. Measuring the depth in the middle. I like vacuum forming but its applications are limited. It would be nice to host a short class on chocolate mold making here at the Fab Lab, but I don't know what else I would use the vacuum former for. Mostly it's used to make packaging, and I don't make any packaging.","title":"Vacuum forming"},{"location":"assignments/week15.html#chocolate-casting","text":"I followed instructions from the Icelandic newspaper Morgunbla\u00f0i\u00f0, so they may not be useful to you. First melt the chocolate slowly in a water bath. Don't let any water get into the chocolate! You heat it up to 50\u00b0C. Melting the chocolate. Then you let the temperature drop to 42\u00b0C and you add a third of the same kind of chocolate (chopped) to the pot. Stir it into the melt. Chopping the rest of the chocolate. Putting the chopped chocolate into the pot. Then you let the temperature drop down to 32\u00b0C. Now the chocolate is tempered and you can pour it into the molds. Pouring into the molds. I also shook the molds to flatten out the bottoms of the buttons. And here they are, lined up with the 3D prints that created them. Row of chocolate Fab Lab buttons. They all came out well. I would skip the holes in the middle next time. Now it's time for testing. Is the chocolate tempered or not? 
I suspect that my next attempt may be more successful, since I won't have to take pictures and videos while I try to get the temperature profile right. Here's the Fusion model of the Fab Lab chocolate button: Download chocolate button CAD model","title":"Chocolate casting"},{"location":"assignments/week15.html#further-experimentation","text":"It would be interesting to try printing the original Fab Lab button model with an increased distance between the 3D printed lines. I think that would make it possible to draw vacuum through the part without having to draw holes manually. And the top surface texture might be nicer, too.","title":"Further experimentation"},{"location":"assignments/week15.html#fpga-field-programmable-gate-array","text":"The Runber FPGA board. I think the program is supposed to blink the eight LEDs above the FPGA chip, but the tutorial isn't entirely clear about it.","title":"FPGA (Field-Programmable Gate Array)"},{"location":"assignments/week15.html#why","text":"I've been curious about FPGAs for some time, for three reasons: I took the course Computer-Controlled Hardware in engineering school at the University of Iceland. One of the lecturers is an FPGA designer at the prosthetic company \u00d6ssur . He showed how the company's Power Knee has developed over the years from a large, complicated circuit board with lots of components to a much smaller footprint, essentially containing only a microcontroller and an FPGA. The FPGA can contain any digital circuitry, which is really interesting. My wife gave me the book Einstein's Shadow for Christmas a few years ago. It's an entertainingly written account of the quest to photograph a black hole. It involved extremely high-speed FPGA chips which made the Event Horizon Telescope possible. In the 2023 Fab Academy Student Bootcamp , Krisjanis Rijnieks mentioned that ordinary people now have the opportunity to design digital chips and have them made, using open source toolchains like Google Silicon . 
Google also offers the Open MPW program which is a lottery where you can submit your chip design and if you're lucky, Google will pay for the chip fabrication. I also discovered the Zero to ASIC course from Matt Venn, where he teaches people to design Application Specific Integrated Circuits from scratch and has them manufactured for them. Exciting stuff! Chips are designed using VHDL or Verilog, which are hardware description languages. You also use these languages to program FPGAs. Before committing a chip design to manufacturing, people usually test it on an FPGA. In Wild Card Week, I thought to myself: If I don't try an FPGA now, I probably never will. For this week, I ordered the cheapest FPGA board that I could find ($30) that still has a set of tutorials available, the Gowin RUNBER board from Seeed Studio.","title":"Why"},{"location":"assignments/week15.html#how","text":"An FPGA is a circuit that you design with a hardware description language. The unit of computation is one bit, which can be a 1 or a 0. The FPGA consists of lots of Look-Up Tables (LUTs), which specify what happens when ones or zeros enter them. These LUTs form logic gates which can be combined to make any circuit, even a whole ARM microcontroller! Indeed, many FPGAs are programmed to contain a microcontroller core as well as additional digital circuits. The Gowin GW1N-4 on my board has 4608 LUTs with four inputs each. I managed to get a license by sending an email to Gowin customer service and then I followed the first tutorial in this PDF . The circuit blinks eight LEDs, one at a time. In Gowin FPGA Designer, I created a new project: I selected FPGA Design Project: And then I selected my FPGA chip: I got a summary of the project settings before finishing the setup: Now I had a new FPGA design project. So far so good: I created a new Verilog file and pasted the code from the tutorial. Then I clicked Synthesize: That seemed to work OK. 
Then I went into Tools -> Floor Planner to see how the circuit looked inside the FPGA. I got an error. Then I looked a little further in the tutorial and found that I needed to create a constraints file, where I specify which pins I'm using and what they are. I went into File -> New, selected Physical Constraints File and gave it the name constr.cst: I just copied the code from the tutorial and pasted it into the constraints file. This is how it looks in the editor: The constraints file defines eight output pins that are supposed to turn eight LEDs on, one at a time. Now I could go into Tools -> Floor Planner. I got a graphical representation of the chip. I would expect eight pins to be highlighted in blue, but only six pins are highlighted. But I wouldn't know how to fix the code, and the messages in the terminal seemed positive: So I soldiered on. I selected the I/O Constraints at the bottom of the window and got a list of the constraints that were defined in the .cst file: The third and final file I needed to create before programming the FPGA was a timing constraints file. In FPGAs, everything is counted in clock cycles, and you need to define the clock frequency. I opened the Timing Constraints Editor: There I created a Timing Constraints file with a period of 83.333 nanoseconds and a frequency of 12 MHz: The timing constraints file only contains a single line of code specifying the clock speed: Then I selected Run Place and Route. That's the button with four colored squares that looks a bit like the Windows icon. I got a bunch of Generate file completed messages in the terminal, which seemed promising: If you look again at the image above, you can see the mouse hovering over the Programmer icon, which is a green downward arrow. I now pressed it and hoped for the best. I have no idea what I am doing. The programmer found a USB device, so I clicked Program/Configure. Here the tutorial ends, but I still needed to make some selections. 
I found the manual for the Programmer in the Gowin software directory and followed instructions that told me to select Embedded flash mode and select the .fs programming file in the dialog: When I clicked Save I got an error: This seems like a simple error, all I have to do is select the right chip. I tried selecting all the chips that have similar names to mine (GWIN-4, GWIN4B, GWIN-4D and all their versions). I had to select the chip both in the Floor Planner and in the Programmer. But nothing worked. I don't know where to go from here, so I'll stop. .md-content__button { display: none; }","title":"How"},{"location":"assignments/week16.html","text":"Applications and Implications Here's the proposal for my final project: What will it do? It will be a small, light and nimble SCARA-type robot arm with a structure made of PCBs (with 3D printed additions to stiffen the structure) and powered by brushless gimbal motors with angle sensors. It will be a motion platform that I will add end effectors to later. Its purpose is to teach robotics and perhaps automate some small tasks. Who has done what beforehand? 1 2 3 4 5 6 7 8 9 10 11 12 This robot arm sketch by Masoud Akbarzadeh inspired the shape of spiral 1 of my robot arm joint. Avishek Das made a small RC servo robot arm with a really nice simulation interface. I just don't like the jerkiness and imprecision of RC servos. Dan Chen made a clean-looking modular robot that can grab sushi. It's also based on RC servos. A group at Fab Lab Oulu made a cardboard robot arm in machine week in 2020. It's also based on RC servos. Hiroaki Kimura made a simple RC-servo arm and controlled it with a smaller arm with potentiometers in the joints. Kenny Phay Ngiap Peng made a stepper and RC-servo robot arm. A group at Fab Lab IED Madrid made a stepper-based SCARA arm in Machine Week in 2019. A group at Super Fab Lab Kochi made a nice stepper based SCARA arm in Machine week 2023. Jules Topart made a promising robot joint actuator. 
Xiaomeng Liu made a very nice iPhone camera gimbal with brushless motors. These are the actuators I want to work with and this is the smoothness of motion that I want. Light and nimble, like a gimbal! Christian Schmidt made a very compact GoPro camera gimbal but it was a little jittery. Adam B\u00e4ckstr\u00f6m hacked hobby servos in an amazing way and created a super-precise robot arm. What will you design? A robot joint controller board that I can replicate to make all the joints on the robot. I'll also design a 3D printed part that hides all the wires. What materials and components will be used, where will they come from, how much will they cost? Part Part no. Amount Price (USD) Total (USD) Link GM2804 Gimbal Motor w/Encoder G006983_2 3 38.99 116.97 https://shop.iflight-rc.com/ipower-gm2804-gimbal-motor-with-as5048a-encoder-pro288 Gimbal Motor Slipring - OD 6.5mm G006983_2 2 15.99 31.98 https://shop.iflight-rc.com/6-5mm-slipring-for-ipower-motor-gm2804-gimbal-motor-pro302?search=Gimbal%20Motor%20Slipring%20-%20OD%206.5mm Yoctopuce Micro-USB-Hub-V2 MHUB0002 1 32.4 32.4 https://www.yoctopuce.com/EN/products/extensions-and-networking/micro-usb-hub-v2 Pololu Adjustable 4-12V Step-Up/Step-Down Voltage Regulator S18V20ALV 1 37.95 37.95 https://www.pololu.com/product/2572 ATSAMD21E18A-AUT ATSAMD21E18A-AUTTR-ND 3 4.23 12.69 https://www.digikey.com/en/products/detail/microchip-technology/ATSAMD21E18A-AUT/4878871 DRV8313PWP 296-35540-5-ND 3 4.81 14.43 https://www.digikey.com/en/products/detail/texas-instruments/DRV8313PWP/3790947 PLA filament 10 Total 256.42 What parts and systems will be made? I will make the control boards and 3D printed structure. What processes will be used? PCB milling, 3D printing. What questions need to be answered? Can I find an Arduino core for the SAMD21 that both the OSAP and SimpleFOC libraries can compile to? Can these two libraries coexist on the same microcontroller? Can I get the closed loop control working? Can I tune the PID? 
Can I mill the fine traces for the SAMD21 microcontroller and the DRV8313 brushless motor driver? Can I design such a complicated board? How will it be evaluated? If I manage to get one robot joint working, then I consider the final project a success. I will keep working on the robot after the final presentation. .md-content__button { display: none; }","title":"16. Applications and Implications"},{"location":"assignments/week16.html#applications-and-implications","text":"Here's the proposal for my final project:","title":"Applications and Implications   "},{"location":"assignments/week16.html#what-will-it-do","text":"IT will be a small, light and nimble SCARA-type robot arm with a structure made of PCBs (with 3D printed additions to stiffen the structure) and powered by brushless gimbal motors with angle sensors. It will be a motion platform that I will add end effectors to later. Its purpose is to teach robotics and perhaps automate some small tasks.","title":"What will it do?"},{"location":"assignments/week16.html#who-has-done-what-beforehand","text":"1 2 3 4 5 6 7 8 9 10 11 12 This robot arm sketch by Masoud Akbarzadeh inspired the shape of spiral 1 of my robot arm joint. Avishek Das made a small RC servo robot arm with a really nice simulation interface. I just don't like the jerkiness and imprecision of RC servos. Dan Chen made a clean-looking modular robot that can grab sushi. It's also based on RC servos. A group at Fab Lab Oulu made a cardboard robot arm in machine week in 2020. It's also based on RC servos. Hiroaki Kimura made a simple RC-servo arm and controlled it with a smaller arm with potentiometers in the joints. Kenny Phay Ngiap Peng made a stepper and RC-servo robot arm. A group at Fab Lab IED Madrid made a stepper-based SCARA arm in Machine Week in 2019. A group at Super Fab Lab Kochi made a nice stepper based SCARA arm in Machine week 2023. Jules Topart made a promising robot joint actuator. 
Xiaomeng Liu made a very nice iPhone camera gimbal with brushless motors. These are the actuators I want to work with and this is the smoothness of motion that I want. Light and nimble, like a gimbal! Christian Schmidt made a very compact GoPro camera gimbal but it was a little jittery. Adam B\u00e4ckstr\u00f6m hacked hobby servos in an amazing way and created a super-precise robot arm.","title":"Who has done what beforehand?"},{"location":"assignments/week16.html#what-will-you-design","text":"A robot joint controller board that I can replicate to make all the joints on the robot. I'll also design a 3D printed part that hides all the wires.","title":"What will you design?"},{"location":"assignments/week16.html#what-materials-and-components-will-be-used-where-will-they-come-from-how-much-will-they-cost","text":"Part Part no. Amount Price (USD) Total (USD) Link GM2804 Gimbal Motor w/Encoder G006983_2 3 38.99 116.97 https://shop.iflight-rc.com/ipower-gm2804-gimbal-motor-with-as5048a-encoder-pro288 Gimbal Motor Slipring - OD 6.5mm G006983_2 2 15.99 31.98 https://shop.iflight-rc.com/6-5mm-slipring-for-ipower-motor-gm2804-gimbal-motor-pro302?search=Gimbal%20Motor%20Slipring%20-%20OD%206.5mm Yoctopuce Micro-USB-Hub-V2 MHUB0002 1 32.4 32.4 https://www.yoctopuce.com/EN/products/extensions-and-networking/micro-usb-hub-v2 Pololu Adjustable 4-12V Step-Up/Step-Down Voltage Regulator S18V20ALV 1 37.95 37.95 https://www.pololu.com/product/2572 ATSAMD21E18A-AUT ATSAMD21E18A-AUTTR-ND 3 4.23 12.69 https://www.digikey.com/en/products/detail/microchip-technology/ATSAMD21E18A-AUT/4878871 DRV8313PWP 296-35540-5-ND 3 4.81 14.43 https://www.digikey.com/en/products/detail/texas-instruments/DRV8313PWP/3790947 PLA filament 10 Total 256.42","title":"What materials and components will be used, where will they come from, how much will they cost?"},{"location":"assignments/week16.html#what-parts-and-systems-will-be-made","text":"I will make the control boards and 3D printed 
structure.","title":"What parts and systems will be made?"},{"location":"assignments/week16.html#what-processes-will-be-used","text":"PCB milling, 3D printing.","title":"What processes will be used?"},{"location":"assignments/week16.html#what-questions-need-to-be-answered","text":"Can I find an Arduino core for the SAMD21 that both the OSAP and SimpleFOC libraries can compile to? Can these two libraries coexist on the same microcontroller? Can I get the closed loop control working? Can I tune the PID? Can I mill the fine traces for the SAMD21 microcontroller and the DRV8313 brushless motor driver? Can I design such a complicated board?","title":"What questions need to be answered?"},{"location":"assignments/week16.html#how-will-it-be-evaluated","text":"If I manage to get one robot joint working, then I consider the final project a success. I will keep working on the robot after the final presentation. .md-content__button { display: none; }","title":"How will it be evaluated?"},{"location":"assignments/week17.html","text":"Invention, Intellectual Property and Business Models License At the bottom of every page on this website, I claim my copyright over the work that I\u00b4ve done in the Fab Academy. I want to choose a permissive license for the work, all I want is to be mentioned if you use part of it in your own project. I thought about one of the Creative Commons licenses, but then I found that Creative Commons don't recommend their licenses for software or hardware. Others that may apply are the MIT license for software and the CERN open hardware license. This is a jungle, and I'm a bit confused. And if I choose a licence, I'll need to include it with all my design files. I don't have time for that right now. Maybe the \" Fab Lab license \" that Neil Gershenfeld puts in all his software files on the Fab Academy website would be a good license for my work. 
But again, I am already stretched to my limits trying to finish all the assignments and the documentation; I can't also go back and modify all the design files to include a license. I can only work from nine in the morning to midnight for so long. I need to see my family at some point. So for now, the work is copyright, and all rights are reserved. I will revisit this in the near future (I'll review choosealicense.com and Open Hardware Licenses ) and see how I can best open the work up for others to use. Feel free to contact me at \"svavar at fabisa dot is\" to get permission to use the stuff I've made. Funding plan I'm not going to start a company around this little robot. But I have applied for and received two grants to develop it. Grant #1 I applied for the first grant three months before the Fab Academy started. I had been thinking about final projects for a full year, because I was so excited about entering the Fab Academy. I really wanted to make something cool. Then when the deadline for the Icelandic Technology Development Fund rolled around, I had done quite a bit of thinking and I used that thinking to send in an application for the smallest grant of one million ISK (about $7000). And I got it! I used that grant to buy all the parts that I thought I would need for the arm, including molding and casting supplies to make precise gearboxes. The parts that I bought came in very handy, not all of them, but the rest will be useful to future Fab Academy students. Grant #2 The second grant was from the Icelandic Student Innovation Fund. I got funds to employ a university student for the summer to develop an interface for the robot in the Robot Operating System (ROS). That student is Gu\u00f0j\u00f3n Bergmann, a friend from engineering school. Many of his buddies at TU Delft are looking into ROS, so this project will be good for him. Fortunately I've managed to build the arm just in time for his summer project to start. 
Dissemination plan I've thought a lot about how to make my robot arm. But I haven't thought much about how to make sure that it reaches its intended user group. Who is its intended user? Someone like me who is interested in robotics, I guess? High school and university students, engineers, technology enthusiasts, computer science majors who want to program physical things and make them move? Who are they and where do they hang out online? How do I reach these people? I don't know. That's the part I'm not very good at. Do I set up a website? I guess I could let people in the Fab Lab community know about the project and see if anyone wants to buy an arm to use in their classes. But this is quite a tricky board to mill. And I could show the robot in the SimpleFOC community, except there everybody wants to build their own robot in their own style from scratch. I have a vague dream of asking Seeed Studio if they would be interested in selling populated boards in their online store as an easy way to get into robotics. I don't know. Maybe if the project gets featured in an article on Hackaday . Yes, that might be the right audience. Presentation files I made a final project presentation slide and video under Presentation . .md-content__button { display: none; }","title":"17. Invention, Intellectual Property and Business Models"},{"location":"assignments/week17.html#invention-intellectual-property-and-business-models","text":"","title":"Invention, Intellectual Property and Business Models   "},{"location":"assignments/week17.html#license","text":"At the bottom of every page on this website, I claim my copyright over the work that I\u00b4ve done in the Fab Academy. I want to choose a permissive license for the work, all I want is to be mentioned if you use part of it in your own project. I thought about one of the Creative Commons licenses, but then I found that Creative Commons don't recommend their licenses for software or hardware. 
Others that may apply are the MIT license for software and the CERN open hardware license. This is a jungle, and I'm a bit confused. And if I choose a licence, I'll need to include it with all my design files. I don't have time for that right now. Maybe the \" Fab Lab license \" that Neil Gershenfeld puts in all his software files on the Fab Academy website would be a good license for my work. But again, I am already stretched to my limits trying to finish all the assignments and the documentation; I can't also go back and modify all the design files to include a license. I can only work from nine in the morning to midnight for so long. I need to see my family at some point. So for now, the work is copyright, and all rights are reserved. I will revisit this in the near future (I'll review choosealicense.com and Open Hardware Licenses ) and see how I can best open the work up for others to use. Feel free to contact me at \"svavar at fabisa dot is\" to get permission to use the stuff I've made.","title":"License"},{"location":"assignments/week17.html#funding-plan","text":"I'm not going to start a company around this little robot. But I have applied for and received two grants to develop it.","title":"Funding plan"},{"location":"assignments/week17.html#grant-1","text":"I applied for the first grant three months before the Fab Academy started. I had been thinking about final projects for a full year, because I was so excited about entering the Fab Academy. I really wanted to make something cool. Then when the deadline for the Icelandic Technology Development Fund rolled around, I had done quite a bit of thinking and I used that thinking to send in an application for the smallest grant of one million ISK (about $7000). And I got it! I used that grant to buy all the parts that I thought I would need for the arm, including molding and casting supplies to make precise gearboxes. 
The parts that I bought came in very handy, not all of them, but the rest will be useful to future Fab Academy students.","title":"Grant #1"},{"location":"assignments/week17.html#grant-2","text":"The second grant was from the Icelandic Student Innovation Fund. I got funds to employ a university student for the summer to develop an interface for the robot in the Robot Operating System (ROS). That student is Gu\u00f0j\u00f3n Bergmann, a friend from engineering school. Many of his buddies at TU Delft are looking into ROS, so this project will be good for him. Fortunately I've managed to build the arm just in time for his summer project to start.","title":"Grant #2"},{"location":"assignments/week17.html#dissemination-plan","text":"I've thought a lot about how to make my robot arm. But I haven't thought much about how to make sure that it reaches its intended user group. Who is its intended user? Someone like me who is interested in robotics, I guess? High school and university students, engineers, technology enthusiasts, computer science majors who want to program physical things and make them move? Who are they and where do they hang out online? How do I reach these people? I don't know. That's the part I'm not very good at. Do I set up a website? I guess I could let people in the Fab Lab community know about the project and see if anyone wants to buy an arm to use in their classes. But this is quite a tricky board to mill. And I could show the robot in the SimpleFOC community, except there everybody wants to build their own robot in their own style from scratch. I have a vague dream of asking Seeed Studio if they would be interested in selling populated boards in their online store as an easy way to get into robotics. I don't know. Maybe if the project gets featured in an article on Hackaday . 
Yes, that might be the right audience.","title":"Dissemination plan"},{"location":"assignments/week17.html#presentation-files","text":"I made a final project presentation slide and video under Presentation . .md-content__button { display: none; }","title":"Presentation files"},{"location":"assignments/week18.html","text":"Project Development The final project plan. What tasks have been completed? I managed to put together one robot joint and test it successfully. I'm really glad that I got this far. Because my teaching duties were over for the semester, I was finally able to document as I went along. I found that enjoyable, and the final project documentation is much better for it. I will definitely aim for real-time documentation in my projects from now on. I approached the project in spirals, just trying to make one robot joint work. In the first spiral I took the Stepper RP2040 Modular Thing, changed its shape and added a few things. I did a lot of testing and debugging and found that the Toshiba TB67H451FNG motor drivers are not a good fit for brushless motors. That was quite a big blow, because I had spent a lot of time designing that board. But Rico Kanthatham urged us to make the major mistakes in our final project early, and I had done that. I did spiral 1 of my final project in Output Devices week (and two more weeks after that). So I still had time to find another motor driver and start the electronics design again from scratch. The evolution of my PCB design skills can be seen in these four boards: My PCB design journey. Right-click the image and select \"Open image in new tab\" to see the boards better. My first board was very simple; it had just a Xiao module, an LED with a current-limiting resistor and a button. It was a good first design project and I was so happy when I got the LED blinking! 
My first major design was the machine controller for the Teamanator , the Terminator-inspired tea machine that Andri , Hafey and myself finished successfully during an intense weekend in Fab Lab Reykjav\u00edk. My second major design was spiral 1 of my robot joint, which embeds a Xiao module onto the board. In spiral 2 I wanted to go further, so I embedded the SAMD21 chip directly onto the board, which I think is much cooler. I had the confidence to do that because I had played around with the RGBB Modular Thing and managed to program it with the Atmel-ICE. I think I've come a long way, because I had not designed an electronic circuit before starting the Fab Academy. What tasks remain? I need to connect the second robot joint and try operating them together. The robot is also missing a Z-axis (up and down). While I've been finishing up the documentation of the weekly assignments, I've been thinking about ways to make a clean-looking Z-axis. I think I have a good solution, but it requires a hack (see \"What questions need to be resolved?\"). I also need to make some kind of an end-effector. My instructor \u00de\u00f3rarinn helped me make an electromagnet , but in the final push, I had to do triage and decide which parts of the project needed to be abandoned and which ones I might finish. I put all my energy into getting the motor control board made and assembled with all the wires hidden. What has worked? What hasn't? Everything I've tested in the final project works. I made sure of that before I assembled it. It was a bit of a risk to wait so long before actually fabricating anything. In the last Global Open Time meeting on the Saturday before I presented, I was milling the first board for my final project, and my final project still looked like this: Meme from \u00de\u00f3rarinn. The actual image is the first one in \u00de\u00f3rarinn's meme at the bottom of this page. This was my second major attempt at milling this board. 
The first time I used a 0.01\" milling bit, which broke after an hour of milling, in the middle of the night. You can see my reaction to that in my presentation video . Then I tried a V-bit, which is much stronger and more durable, but if you don't get the Z-height exactly right, the track that it mills will either be too thin or too thick. This may result in traces disappearing. While Global Open Time was still in session, the Modela MDX-20 finished milling with the V-bit: The moment I realized that I could finish my final project. I went to the video conferencing screen in the lab with my hands up in victory, and the guys celebrated with me. Look at those beautiful traces. As for the outcome of the final project: I love how you can see all the electronic components and yet the robot has a clean look and all the wires are hidden. All of them! That took a LOT of system integration work. The robot actually looks pretty much exactly like I envisioned it. However, assembling the robot was hell. Wires kept breaking and it was difficult to get them into the 3D printed part. I wouldn't want to do it like this again. This was spiral 2. In spiral 3, I want to design a multilayer PCB that contains all the wires that are currently hidden inside the 3D printed part. Then I can realize my original vision of a robot that is made of motors and PCB and nothing else! And it will still have a clean look! The prospect is exciting, but my goodness, this project is so much work. I really need a good summer holiday. Also, pretty much all the software interface work remains. I did receive a grant for my engineering buddy Gu\u00f0j\u00f3n Bergmann to do some work on that. What questions need to be resolved? Magnetic angle sensor placed off-axis. The PID loop still worked. If I move it a tiny bit further away from the middle, the motor goes haywire. 
Can I place the magnetic angle sensor far enough from the axis so that I can put a Z-axis leadscrew through the hole in the middle of the brushless motor? I've done a preliminary test with me holding the sensor off-axis, and it may work. What will happen when? Note about the final push from \u00de\u00f3rarinn. We signed it and everything. It's sitting on the espresso corner table that I made in Machining week . The espresso was quite well extracted (here's another reference image ). Gu\u00f0j\u00f3n Bergmann , who is studying Aerospace Engineering at TU Delft, will make a mockup of the software interface, and possibly do some real-world testing this summer. His work will finish in the beginning of August. I don't know when I will have time to do more work on this project myself, but I really want to get my \"only motors and PCBs\" robot made at a PCB house. That would be so cool. But right now I'm on my last chance to write a Master's thesis in Mechanical Engineering this winter. So I'll probably focus on that in the fall of 2023. Gu\u00f0j\u00f3n Bergmann. What have you learned? Demand-side time management is a really important part of the Fab Academy, because if you just think about what you would like to accomplish and line those tasks up linearly, you will run out of time. The project must be designed with the time and energy you have in mind. Spiral development is also very useful to me. My instinct is to try to get all the way to a finished product in one giant leap, but spiral development, where you complete a rough and simple version first and then see if you have time for another complete spiral with more features, has much better results. As \u00de\u00f3rarinn said after I presented my final project, Fab Academy is really one big lesson in project management. 
However, I've never used Gantt charts or other detailed project management methods, because I like working in research and development, and there you never know where the road leads you or how long it's going to take. It's often possible to mock up a technical idea in a day or a weekend that you've been thinking about for years, and finally see if it's good or not. I've tried many things during the Fab Academy that I've been thinking about for a long time. And they're not as intimidating as they seemed. I've discovered the joy of distributed work on an open source community project (the Frankenstein MCU ). Look! I figured out a way to add Zoom video conferencing to the FMCU and made nice instructions on how to run that version locally, for the other developers. This is my first time doing this sort of thing and I really like it. I've discovered a world of collaborators in the Fab Lab network. My instructor \u00de\u00f3rarinn has been very supportive throughout this journey and taught me many things, especially relating to electronics. He is the resident meme master in the Fab Academy chat on Mattermost. Seriously, I think half of the memes on the meme channel came from him. He even made a meme just for me by rearranging the image series on my ideas page: .md-content__button { display: none; }","title":"18-20. Project Development"},{"location":"assignments/week18.html#project-development","text":"The final project plan.","title":"Project Development   "},{"location":"assignments/week18.html#what-tasks-have-been-completed","text":"I managed to put together one robot joint and test it successfully. I'm really glad that I got this far. Because my teaching duties were over for the semester, I was finally able to document as I went along. I found that enjoyable, and the final project documentation is much better for it. I will definitely aim for real-time documentation in my projects from now on. 
I approached the project in spirals, just trying to make one robot joint work. In the first spiral I took the Stepper RP2040 Modular Thing, changed its shape and added a few things. I did a lot of testing and debugging and found that the Toshiba TB67H451FNG motor drivers are not a good fit for brushless motors. That was quite a big blow, because I had spent a lot of time designing that board. But Rico Kanthatham urged us to make the major mistakes in our final project early, and I had done that. I did spiral 1 of my final project in Output Devices week (and two more weeks after that). So I still had time to find another motor driver and start the electronics design again from scratch. The evolution of my PCB design skills can be seen in these four boards: My PCB design journey. Right-click the image and select \"Open image in new tab\" to see the boards better. My first board was very simple; it had just a Xiao module, an LED with a current-limiting resistor and a button. It was a good first design project and I was so happy when I got the LED blinking! My first major design was the machine controller for the Teamanator , the Terminator-inspired tea machine that Andri , Hafey and myself finished successfully during an intense weekend in Fab Lab Reykjav\u00edk. My second major design was spiral 1 of my robot joint, which embeds a Xiao module onto the board. In spiral 2 I wanted to go further, so I embedded the SAMD21 chip directly onto the board, which I think is much cooler. I had the confidence to do that because I had played around with the RGBB Modular Thing and managed to program it with the Atmel-ICE. I think I've come a long way, because I had not designed an electronic circuit before starting the Fab Academy.","title":"What tasks have been completed?"},{"location":"assignments/week18.html#what-tasks-remain","text":"I need to connect the second robot joint and try operating them together. The robot is also missing a Z-axis (up and down). 
While I've been finishing up the documentation of the weekly assignments, I've been thinking about ways to make a clean-looking Z-axis. I think I have a good solution, but it requires a hack (see \"What questions need to be resolved?\"). I also need to make some kind of an end-effector. My instructor \u00de\u00f3rarinn helped me make an electromagnet , but in the final push, I had to do triage and decide which parts of the project needed to be abandoned and which ones I might finish. I put all my energy into getting the motor control board made and assembled with all the wires hidden.","title":"What tasks remain?"},{"location":"assignments/week18.html#what-has-worked-what-hasnt","text":"Everything I've tested in the final project works. I made sure of that before I assembled it. It was a bit of a risk to wait so long before actually fabricating anything. In the last Global Open Time meeting on the Saturday before I presented, I was milling the first board for my final project, and my final project still looked like this: Meme from \u00de\u00f3rarinn. The actual image is the first one in \u00de\u00f3rarinn's meme at the bottom of this page. This was my second major attempt at milling this board. The first time I used a 0.01\" milling bit, which broke after an hour of milling, in the middle of the night. You can see my reaction to that in my presentation video . Then I tried a V-bit, which is much stronger and more durable, but if you don't get the Z-height exactly right, the track that it mills will either be too thin or too thick. This may result in traces disappearing. While Global Open Time was still in session, the Modela MDX-20 finished milling with the V-bit: The moment I realized that I could finish my final project. I went to the video conferencing screen in the lab with my hands up in victory, and the guys celebrated with me. Look at those beautiful traces. 
As for the outcome of the final project: I love how you can see all the electronic components and yet the robot has a clean look and all the wires are hidden. All of them! That took a LOT of system integration work. The robot actually looks pretty much exactly like I envisioned it. However, assembling the robot was hell. Wires kept breaking and it was difficult to get them into the 3D printed part. I wouldn't want to do it like this again. This was spiral 2. In spiral 3, I want to design a multilayer PCB that contains all the wires that are currently hidden inside the 3D printed part. Then I can realize my original vision of a robot that is made of motors and PCB and nothing else! And it will still have a clean look! The prospect is exciting, but my goodness, this project is so much work. I really need a good summer holiday. Also, pretty much all the software interface work remains. I did receive a grant for my engineering buddy Gu\u00f0j\u00f3n Bergmann to do some work on that.","title":"What has worked? What hasn't?"},{"location":"assignments/week18.html#what-questions-need-to-be-resolved","text":"Magnetic angle sensor placed off-axis. The PID loop still worked. If I move it a tiny bit further away from the middle, the motor goes haywire. Can I place the magnetic angle sensor far enough from the axis so that I can put a Z-axis leadscrew through the hole in the middle of the brushless motor? I've done a preliminary test with me holding the sensor off-axis, and it may work.","title":"What questions need to be resolved?"},{"location":"assignments/week18.html#what-will-happen-when","text":"Note about the final push from \u00de\u00f3rarinn. We signed it and everything. It's sitting on the espresso corner table that I made in Machining week . The espresso was quite well extracted (here's another reference image ). 
Gu\u00f0j\u00f3n Bergmann , who is studying Aerospace Engineering at TU Delft, will make a mockup of the software interface, and possibly do some real-world testing this summer. His work will finish in the beginning of August. I don't know when I will have time to do more work on this project myself, but I really want to get my \"only motors and PCBs\" robot made at a PCB house. That would be so cool. But right now I'm on my last chance to write a Master's thesis in Mechanical Engineering this winter. So I'll probably focus on that in the fall of 2023. Gu\u00f0j\u00f3n Bergmann.","title":"What will happen when?"},{"location":"assignments/week18.html#what-have-you-learned","text":"Demand-side time management is a really important part of the Fab Academy, because if you just think about what you would like to accomplish and line those tasks up linearly, you will run out of time. The project must be designed with the time and energy you have in mind. Spiral development is also very useful to me. My instinct is to try to get all the way to a finished product in one giant leap, but spiral development, where you complete a rough and simple version first and then see if you have time for another complete spiral with more features, has much better results. As \u00de\u00f3rarinn said after I presented my final project, Fab Academy is really one big lesson in project management. However, I've never used Gantt charts or other detailed project management methods, because I like working in research and development, and there you never know where the road leads you or how long it's going to take. It's often possible to mock up a technical idea in a day or a weekend that you've been thinking about for years, and finally see if it's good or not. I've tried many things during the Fab Academy that I've been thinking about for a long time. And they're not as intimidating as they seemed. 
I've discovered the joy of distributed work on an open source community project (the Frankenstein MCU ). Look! I figured out a way to add Zoom video conferencing to the FMCU and made nice instructions on how to run that version locally, for the other developers. This is my first time doing this sort of thing and I really like it. I've discovered a world of collaborators in the Fab Lab network. My instructor \u00de\u00f3rarinn has been very supportive throughout this journey and taught me many things, especially relating to electronics. He is the resident meme master in the Fab Academy chat on Mattermost. Seriously, I think half of the memes on the meme channel came from him. He even made a meme just for me by rearranging the image series on my ideas page: .md-content__button { display: none; }","title":"What have you learned?"},{"location":"final-project/3d_design.html","text":"3D Design Basic shape study. Since the arm is made of flat PCBs, I don't have many parameters to play with. Dimensions of the basic shape. I set the diameter of all the arcs to d3 , which is the diameter of the first arc. I change that and the whole model updates. I eventually decided to make all the joints the same shape, because it's simpler and because when the big arcs go inward, there isn't enough room on the board for all the components. So here's the final 3D design: Here's baksi, the spiral 2 version. Download baksi spiral 2 concept Fusion 360 model Download 3D print with internal channels Download base .md-content__button { display: none; }","title":"3D Design"},{"location":"final-project/3d_design.html#3d-design","text":"Basic shape study. Since the arm is made of flat PCBs, I don't have many parameters to play with. Dimensions of the basic shape. I set the diameter of all the arcs to d3 , which is the diameter of the first arc. I change that and the whole model updates. 
I eventually decided to make all the joints the same shape, because it's simpler and because when the big arcs go inward, there isn't enough room on the board for all the components. So here's the final 3D design: Here's baksi, the spiral 2 version. Download baksi spiral 2 concept Fusion 360 model Download 3D print with internal channels Download base .md-content__button { display: none; }","title":"3D Design   "},{"location":"final-project/effector.html","text":"End effector I asked my instructor \u00de\u00f3rarinn whether he could show me how to make an electromagnet, so that baks the robot arm could pick up small metal things. I expected to need to find a suitable iron core and wrap copper wire around it, and that it would be a half-day thing. I had no idea it could be a 5 minute thing! \u00de\u00f3rarinn got a choke coil from the electronics inventory, tried a few different voltage and current combinations, and voil\u00e1! We can lift a small bearing ball! Then we tried soldering three choke coils together to see if the holding strength would increase: It did, you can't shake the ball off! But the current requirements might be too high for a USB port-powered robot. .md-content__button { display: none; }","title":"End Effector"},{"location":"final-project/effector.html#end-effector","text":"I asked my instructor \u00de\u00f3rarinn whether he could show me how to make an electromagnet, so that baks the robot arm could pick up small metal things. I expected to need to find a suitable iron core and wrap copper wire around it, and that it would be a half-day thing. I had no idea it could be a 5 minute thing! \u00de\u00f3rarinn got a choke coil from the electronics inventory, tried a few different voltage and current combinations, and voil\u00e1! We can lift a small bearing ball! Then we tried soldering three choke coils together to see if the holding strength would increase: It did, you can't shake the ball off! 
But the current requirements might be too high for a USB port-powered robot. .md-content__button { display: none; }","title":"End effector   "},{"location":"final-project/electronics.html","text":"Electronics Design Since both the SimpleFOC motor control library and the OSAP network layer library were able to compile onto the SAMD21 chip by pretending that it's an Arduino Nano 33 IoT, that's what I'm going with. Now I need to go into the schematic of this Arduino to see its pin definitions: The Arduino 33 IoT schematic. Let's see what we have here. The SPI communication wires from the AS5048 magnetic angle sensor are as follows: black, pin 1, CSn blue, pin 2, CLK yellow, pin 4, MOSI green, pin 3, MISO red, pin 11, VDD5V white, pin 13, GND SPI connections to the breadboard Modular Thing. Let's start with MOSI. That's pin PA16 on the Arduino Nano 33 IoT. I'll put the yellow wire there on the breadboard Modular Thing. Then MISO is pin PA19. I'll put the green wire there. Through all this tinkering I've learned that CLK is the same as SCK. That's pin PA17 and the blue wire goes there. Csn, or the chip select pin, can be assigned to any digital pin. I'll put on PA18 (the black wire). Then all that's left is +3.3 volts and ground. And here's the test: I'm getting an angle reading in the serial monitor! How cool is that? OK, let's connect the motor driver. For that we need PWM. According to the schematic above, the PWM pins on the Arduino Nano 33 IoT are as follows: Arduino 2 3 5 6 9 10 11 12 16/A2 17/A3 19/A5 SAMD21 PB10 PB11 PA05 PA04 PA20 PA21 Not on schematic Not on schematic PA11 PA10 PB09 On the breadboard Modular Thing I only have access to PA pins. Let's try PA4, PA5 and PA10 for PWM and PA6 as the enable pin. In the Arduino code I'll set pins 6, 5 and 17 to output PWM and let pin 7 be the enable pin. After uploading the angle_control.ino sketch, the motor twitched a little bit and I got the following messages in the serial monitor: MOT: Enable driver. 
MOT: Align sensor. MOT: Failed to notice movement MOT: Init FOC failed. Motor ready. I must have mixed up some of the motor driver pins. Let's add my LED testing board to the circuit: No PWM on the brown wire. The enable pin turns on first (white wire) and then PWM starts on the yellow and orange wires. The brown wire (the third BLDC phase) gets no PWM signal. Aha! I still have digital pin 9 in the code, which applies to PA20! That's how I first wrote the code, but I then discovered that PA20 isn't broken out on the breadboard Thing. Alright, I'll change the third PWM pin from 9 to 17, which matches PA10, and see what happens. Now I'm getting PWM signals on all three phases, but the motor shakes like crazy. I wonder if I accidentally wired the phases in the wrong order. Nope, that isn't helping. I'll try removing the LED test board, since I'm done with that for now. And that was all it took! I have a smooth, responsive servomotor controlled by a bare SAMD21 chip! I won't upload a video of that, because my repository is getting quite big. This is it. Now I can design my board. PCB schematic design I added some electronic parts to a blank schematic. I used the RGBB Modular Thing as a reference design, to see which passive parts the microcontroller needs. I want to have an RGB status LED on the robot's 'forearm' and also a button to disengage the motors and record movements. I can see that the Modular Things follow the SAMD21 datasheet and put a 10uF decoupling capacitor on VDDIN (the +3.3V input voltage) and a 1uF decoupling capacitor on VDDCORE (the 1.23V core logic voltage of the chip, provided by an internal regulator): I don't know what decoupling capacitors do, but it's nice to verify the design using this table in the microcontroller datasheet. Then I started on the motor controller. I used the SimpleFOC Mini as a reference design. 
One of the goals of the SimpleFOC Mini is to: Make it in a way to be a minimal working example for users that are interested to build their own boards based on the DRV8313 chip. -Antun Skuric, creator of the SimpleFOC library The SimpleFOC Mini is a two-layer board, but I want to make a one-layer board. Fortunately I'm not using all the pins on the motor controller, so I can simplify the design: The DRV8313 motor driver has a 3.3 V regulator to run a microcontroller, but it only supplies 10 mA, which is not enough for me. So I'll skip that pin. On second thought, after reading about the pins that I haven't been using, I'm changing my opinion. I would like to be able to The DRV8313 motor driver wasn't in the KiCAD library or in the fab library, but I found it on SnapEDA. But I quickly ran into a problem with it: Pins 4 and 11 are superimposed. They are both VM, but on the SimpleFOC Mini schematic, they are connected differently. So I needed to right click the symbol and select Edit with Symbol Editor. Fortunately, all I had to do there was to move one of the pins: I also moved the three superimposed GND pins apart, so that I could read the pin numbers. I used this layout example heavily when reviewing my design. PCB routing I had to put eight jumper resistors in order to route this board on one layer. I also route the USB, power and SPI wires through the 3D printed layer that is sandwiched between the two PCBs in each joint. I look forward to redesigning the board for manufacturing at a board house, with 2-4 layers. That should make things considerably easier. But I am glad that I managed to design a version of the board that can be made in any Fab Lab on a single-layer PCB, because not everyone is skilled at making double-layer PCBs (myself included). Let's just put a wire that goes on the back side of the PCB. No one will know. 
PCB design review Now let me go over the schematic and PCB layout and see if everything checks out: Microcontroller USB (I didn't check whether the data lines are equally long. Let's hope for the best!) 3.3V regulator (PCB layout didn't match schematic, fixed now) Button RGB LED JTAG programming connector and associated passive components (Oh no: SWDCLK trace is routed under JTAG connector! Looking at the JTAG connector, this seems impossible. Time for another jumper resistor.) SPI connections to the magnetic angle sensor Decoupling capacitors for the microcontroller Rest of the microcontroller connections Motor driver Motor power input Power indicator LED FAULT, RESET and SLEEP on the motor driver Bulk capacitor and bypass capacitors for motor (One bypass cap wasn't connected to ground! Fixed now.) PWM signal wires between microcontroller and motor driver ENABLE wires and resistor Rest of motor driver connections ( Two motor outputs routed under jumper resistor. Not good. The two VCC pins on the motor driver weren't connected together. They clearly need to be, according to the layout example. Fixed with a trace underneath the BLDC motor. Must remember to cut some vinyl to insulate the motor from this +10V power trace. The layout example has resistors going between ground pins. The SimpleFOC Mini schematic has no such resistors. It worked on the breadboard, so I'll skip the resistors. I've run out of room on the PCB! One more thing that I changed from the SimpleFOC schematic, is that I'm connecting COMPO to ground, as the motor driver datasheet shows. I also tried to connect NC to ground, but NC means Not Connected and it doesn't want to connect to anything. Both the datasheet and the SimpleFOC Mini schematic connect the +3.3V output from the motor driver to FAULT, RESET and SLEEP, to pull them up. I just don't have space for that. 
I'll just mill the PCB and if the 3.3V supply from the microcontroller doesn't do the trick then I'll just have to solder a wire to the board afterwards. Let's go!) I had to stop milling the board and start again, because I forgot one of the comments I made in the last item on the Motor driver checklist. A very important comment that I've now italicized. So I fixed the PCB layout as follows: I realized that after going through the checklist and making the necessary changes, I didn't need those two jumper resistors anymore. I was also able to move the VCC track out from underneath the motor. Final design The baksi robot joint schematic. The baksi robot joint PCB layout. PCB production My first attempt at milling the PCB failed, and I broke the smallest end mill (the 0.01\" one). I only have one left now. Looking back, I made the fundamental mistake of not making a small test of the most challenging aspect of the process. I'm going to try a V-bit now. When soldering the teeny tiny DRV8313 motor driver onto my beautiful board, I found that its legs are very thin and flexible, and so when you've fastened the driver to the big ground plane with a heat gun, you can bend the legs into place, as long as they're close to their intended copper pad. After soldering, I successfully put the bootloader on the SAMD21 chip and then programmed it with the SimpleFOC code. But I got no reading from the magnetic angle sensor and the serial monitor said that it detected no movement from the motor. The motor sounded weird. 
Download baks KiCAD project Download baks_joint2_traces.png Download baks_joint2_traces_exterior.png Download baks_joint2_holes_interior.png .md-content__button { display: none; }","title":"Electronics Design"},{"location":"final-project/electronics.html#electronics-design","text":"Since both the SimpleFOC motor control library and the OSAP network layer library were able to compile onto the SAMD21 chip by pretending that it's an Arduino Nano 33 IoT, that's what I'm going with. Now I need to go into the schematic of this Arduino to see its pin definitions: The Arduino 33 IoT schematic. Let's see what we have here. The SPI communication wires from the AS5048 magnetic angle sensor are as follows: black, pin 1, CSn blue, pin 2, CLK yellow, pin 4, MOSI green, pin 3, MISO red, pin 11, VDD5V white, pin 13, GND SPI connections to the breadboard Modular Thing. Let's start with MOSI. That's pin PA16 on the Arduino Nano 33 IoT. I'll put the yellow wire there on the breadboard Modular Thing. Then MISO is pin PA19. I'll put the green wire there. Through all this tinkering I've learned that CLK is the same as SCK. That's pin PA17 and the blue wire goes there. Csn, or the chip select pin, can be assigned to any digital pin. I'll put on PA18 (the black wire). Then all that's left is +3.3 volts and ground. And here's the test: I'm getting an angle reading in the serial monitor! How cool is that? OK, let's connect the motor driver. For that we need PWM. According to the schematic above, the PWM pins on the Arduino Nano 33 IoT are as follows: Arduino 2 3 5 6 9 10 11 12 16/A2 17/A3 19/A5 SAMD21 PB10 PB11 PA05 PA04 PA20 PA21 Not on schematic Not on schematic PA11 PA10 PB09 On the the breadboard Modular Thing I only have access to PA pins. Let's try PA4, PA5 and PA10 for PWM and PA6 as the enable pin. In the Arduino code I'll set pins 6, 5 and 17 to output PWM and let pin 7 be the enable pin. 
After uploading the angle_control.ino sketch, the motor twitched a little bit and I got the following messages in the serial monitor: MOT: Enable driver. MOT: Align sensor. MOT: Failed to notice movement MOT: Init FOC failed. Motor ready. I must have mixed up some of the motor driver pins. Let's add my LED testing board to the circuit: No PWM on the brown wire. The enable pin turns on first (white wire) and then PWM starts on the yellow and orange wires. The brown wire (the third BLDC phase) gets no PWM signal. Aha! I still have digital pin 9 in the code, which applies to PA20! That's how I first wrote the code, but I then discovered that PA20 isn't broken out on the breadboard Thing. Alright, I'll change the third PWM pin from 9 to 17, which matches PA10, and see what happens. Now I'm getting PWM signals on all three phases, but the motor shakes like crazy. I wonder if I accidentally wired the phases in the wrong order. Nope, that isn't helping. I'll try removing the LED test board, since I'm done with that for now. And that was all it took! I have a smooth, responsive servomotor controlled by a bare SAMD21 chip! I won't upload a video of that, because my repository is getting quite big. This is it. Now I can design my board.","title":"Electronics Design   "},{"location":"final-project/electronics.html#pcb-schematic-design","text":"I added some electronic parts to a blank schematic. I used the RGBB Modular Thing as a reference design, to see which passive parts the microcontroller needs. I want to have an RGB status LED on the robot's 'forearm' and also a button to disengage the motors and record movements. 
I can see that the Modular Things follow the SAMD21 datasheet and put a 10uF decoupling capacitor on VDDIN (the +3.3V input voltage) and a 1uF decoupling capacitor on VDDCORE (the 1.23V core logic voltage of the chip, provided by an internal regulator): I don't know what decoupling capacitors do, but it's nice to verify the design using this table in the microcontroller datasheet. Then I started on the motor controller. I used the SimpleFOC Mini as a reference design. One of the goals of the SimpleFOC Mini is to: Make it in a way to be a minimal working example for users that are interested to build their own boards based on the DRV8313 chip. -Antun Skuric, creator of the SimpleFOC library The SimpleFOC Mini is a two-layer board, but I want to make a one-layer board. Fortunately I'm not using all the pins on the motor controller, so I can simplify the design: The DRV8313 motor driver has a 3.3 V regulator to run a microcontroller, but it only supplies 10 mA, which is not enough for me. So I'll skip that pin. On second thought, after reading about the pins that I haven't been using, I'm changing my opinion. I would like to be able to The DRV8313 motor driver wasn't in the KiCAD library or in the fab library, but I found it on SnapEDA. But I quickly ran into a problem with it: Pins 4 and 11 are superimposed. They are both VM, but on the SimpleFOC Mini schematic, they are connected differently. So I needed to right click the symbol and select Edit with Symbol Editor. Fortunately, all I had to do there was to move one of the pins: I also moved the three superimposed GND pins apart, so that I could read the pin numbers. I used this layout example heavily when reviewing my design.","title":"PCB schematic design"},{"location":"final-project/electronics.html#pcb-routing","text":"I had to put eight jumper resistors in order to route this board on one layer. 
I also route the USB, power and SPI wires through the 3D printed layer that is sandwiched between the two PCBs in each joint. I look forward to redesigning the board for manufacturing at a board house, with 2-4 layers. That should make things considerably easier. But I am glad that I managed to design a version of the board that can be made in any Fab Lab on a single-layer PCB, because not everyone is skilled at making double-layer PCBs (myself included). Let's just put a wire that goes on the back side of the PCB. No one will know.","title":"PCB routing"},{"location":"final-project/electronics.html#pcb-design-review","text":"Now let me go over the schematic and PCB layout and see if everything checks out:","title":"PCB design review"},{"location":"final-project/electronics.html#microcontroller","text":"USB (I didn't check whether the data lines are equally long. Let's hope for the best!) 3.3V regulator (PCB layout didn't match schematic, fixed now) Button RGB LED JTAG programming connector and associated passive components (Oh no: SWDCLK trace is routed under JTAG connector! Looking at the JTAG connector, this seems impossible. Time for another jumper resistor.) SPI connections to the magnetic angle sensor Decoupling capacitors for the microcontroller Rest of the microcontroller connections","title":"Microcontroller"},{"location":"final-project/electronics.html#motor-driver","text":"Motor power input Power indicator LED FAULT, RESET and SLEEP on the motor driver Bulk capacitor and bypass capacitors for motor (One bypass cap wasn't connected to ground! Fixed now.) PWM signal wires between microcontroller and motor driver ENABLE wires and resistor Rest of motor driver connections ( Two motor outputs routed under jumper resistor. Not good. The two VCC pins on the motor driver weren't connected together. They clearly need to be, according to the layout example. Fixed with a trace underneath the BLDC motor. 
Must remember to cut some vinyl to insulate the motor from this +10V power trace. The layout example has resistors going between ground pins. The SimpleFOC Mini schematic has no such resistors. It worked on the breadboard, so I'll skip the resistors. I've run out of room on the PCB! One more thing that I changed from the SimpleFOC schematic, is that I'm connecting COMPO to ground, as the motor driver datasheet shows. I also tried to connect NC to ground, but NC means Not Connected and it doesn't want to connect to anything. Both the datasheet and the SimpleFOC Mini schematic connect the +3.3V output from the motor driver to FAULT, RESET and SLEEP, to pull them up. I just don't have space for that. I'll just mill the PCB and if the 3.3V supply from the microcontroller doesn't do the trick then I'll just have to solder a wire to the board afterwards. Let's go!) I had to stop milling the board and start again, because I forgot one of the comments I made in the last item on the Motor driver checklist. A very important comment that I've now italicized. So I fixed the PCB layout as follows: I realized that after going through the checklist and making the necessary changes, I didn't need those two jumper resistors anymore. I was also able to move the VCC track out from underneath the motor.","title":"Motor driver"},{"location":"final-project/electronics.html#final-design","text":"The baksi robot joint schematic. The baksi robot joint PCB layout.","title":"Final design"},{"location":"final-project/electronics.html#pcb-production","text":"My first attempt at milling the PCB failed, and I broke the smallest end mill (the 0.01\" one). I only have one left now. Looking back, I made the fundamental mistake of not making a small test of the most challenging aspect of the process. I'm going to try a V-bit now. 
When soldering the teeny tiny DRV8313 motor driver onto my beautiful board, I found that its legs are very thin and flexible, and so when you've fastened the driver to the big ground plane with a heat gun, you can bend the legs into place, as long as they're close to their intended copper pad. After soldering, I successfully put the bootloader on the SAMD21 chip and then programmed it with the SimpleFOC code. But I got no reading from the magnetic angle sensor and the serial monitor said that it detected no movement from the motor. The motor sounded weird. Download baks KiCAD project Download baks_joint2_traces.png Download baks_joint2_traces_exterior.png Download baks_joint2_holes_interior.png .md-content__button { display: none; }","title":"PCB production"},{"location":"final-project/embedded_programming.html","text":"Embedded programming Modular Things RGBB Thing To get into Modular Things, I first tried to get the rgbb board to work. I managed to put a bootloader onto the SAMD21E18A chip on the rgbb board and then I put the Fab SAM core on it (first I put a different core on it and the COM port disappeared). I connected it to the computer, opened up the web interface and after a bit of coding the red LED turns on when I press the button! The RGB diode was very dim, so my instructor \u00de\u00f3rarinn took to the electronics workbench, tested it, replaced it with a new LED and discovered that the current-limiting resistors were 10kOhm instead of the recommended 1kOhm. I got to try the soldering tweezers for the first time and boy, are they handy for removing components from a board! \u00de\u00f3rarinn lighting up the diodes with the bench power supply. You just grab the part with the hot tweezers and it comes off! \u00de\u00f3rarinn explained diodes on the whiteboard and I discovered that I've had anodes and diodes backwards all these years! No wonder I was never particularly good at calculating electrical circuits. 
Stepper Thing I was happy to get the rgbb board working, but then the stepper Modular Thing that I made suddenly stopped working. I sent Quentin Bols\u00e9e the image above and asked him if he knew what the errors meant. But before he could reply, I remembered something that \u00c1rni Bj\u00f6rnsson had showed me. Apparently, the pin assignments have changed in the latest Modular Things code for the RP2040 board. I reverted them back to the original ones, and the stepper worked! My BLDC Thing I first tried to make a BLDC Modular Thing using the instructions in the Modular Things repository. I got an error that I couldn't figure out. SimpleFOC The SimpleFOC motor control library works when I use Xiao SAMD21 modules, but when I design my boards, I'd like to use the bare SAMD21E18A chip. I ran into an issue compiling the motor control code to the bare chip and asked for help on the SimpleFOC community forum: Error compiling to SAMD21 (bare chip) - SimpleFOC Community I got a very helpful answer from @AdinAck. Adin made a brushless DC motor control board with a SAMD21 chip running the SimpleFOC library. When programming the chip, he told the Arduino IDE that he was programming an Adafruit Feather board. All he had to do was to design the board so that it conformed to the Feather, and everything worked! So I tried to upload the motor control code to the rgbb Modular Things board by telling the Arduino IDE that I was uploading to a MattAirTech Xeno Mini. No luck. Then an Arduino MKRZERO. That didn't work either. Then an Arduino MKR1000. Nope. I was just randomly trying different boards and finally tried the Arduino Nano 33 IoT. It worked! I even got serial output from it: All I get is errors and zeroes, because there is no motor and no angle sensor connected to the MCU. But I'm happy, because the code compiled! Then I checked if I could upload the Modular Thing code to the SAMD21 chip under the pretense that it was an Arduino Nano 33 IoT. That worked too! 
I even got a 'false' response from the button (in the bottom right corner). Aw yeah! Next, I soldered the necessary parts onto the breadboard Thing and connected it up to the LED test board that I made in Electronics Production week. I wrote a loop that blinks every pin on the IC and sends the corresponding pin number to the serial port. With this I was able to identify which pin in the Arduino Nano 33 IoT board definition applied to which pin on the IC itself. .md-content__button { display: none; }","title":"Embedded programming"},{"location":"final-project/embedded_programming.html#embedded-programming","text":"","title":"Embedded programming   "},{"location":"final-project/embedded_programming.html#modular-things","text":"","title":"Modular Things"},{"location":"final-project/embedded_programming.html#rgbb-thing","text":"To get into Modular Things, I first tried to get the rgbb board to work. I managed to put a bootloader onto the SAMD21E18A chip on the rgbb board and then I put the Fab SAM core on it (first I put a different core on it and the COM port disappeared). I connected it to the computer, opened up the web interface and after a bit of coding the red LED turns on when I press the button! The RGB diode was very dim, so my instructor \u00de\u00f3rarinn took to the electronics workbench, tested it, replaced it with a new LED and discovered that the current-limiting resistors were 10kOhm instead of the recommended 1kOhm. I got to try the soldering tweezers for the first time and boy, are they handy for removing components from a board! \u00de\u00f3rarinn lighting up the diodes with the bench power supply. You just grab the part with the hot tweezers and it comes off! \u00de\u00f3rarinn explained diodes on the whiteboard and I discovered that I've had anodes and diodes backwards all these years! 
No wonder I was never particularly good at calculating electrical circuits.","title":"RGBB Thing"},{"location":"final-project/embedded_programming.html#stepper-thing","text":"I was happy to get the rgbb board working, but then the stepper Modular Thing that I made suddenly stopped working. I sent Quentin Bols\u00e9e the image above and asked him if he knew what the errors meant. But before he could reply, I remembered something that \u00c1rni Bj\u00f6rnsson had showed me. Apparently, the pin assignments have changed in the latest Modular Things code for the RP2040 board. I reverted them back to the original ones, and the stepper worked!","title":"Stepper Thing"},{"location":"final-project/embedded_programming.html#my-bldc-thing","text":"I first tried to make a BLDC Modular Thing using the instructions in the Modular Things repository. I got an error that I couldn't figure out.","title":"My BLDC Thing"},{"location":"final-project/embedded_programming.html#simplefoc","text":"The SimpleFOC motor control library works when I use Xiao SAMD21 modules, but when I design my boards, I'd like to use the bare SAMD21E18A chip. I ran into an issue compiling the motor control code to the bare chip and asked for help on the SimpleFOC community forum: Error compiling to SAMD21 (bare chip) - SimpleFOC Community I got a very helpful answer from @AdinAck. Adin made a brushless DC motor control board with a SAMD21 chip running the SimpleFOC library. When programming the chip, he told the Arduino IDE that he was programming an Adafruit Feather board. All he had to do was to design the board so that it conformed to the Feather, and everything worked! So I tried to upload the motor control code to the rgbb Modular Things board by telling the Arduino IDE that I was uploading to a MattAirTech Xeno Mini. No luck. Then an Arduino MKRZERO. That didn't work either. Then an Arduino MKR1000. Nope. I was just randomly trying different boards and finally tried the Arduino Nano 33 IoT. It worked! 
I even got serial output from it: All I get is errors and zeroes, because there is no motor and no angle sensor connected to the MCU. But I'm happy, because the code compiled! Then I checked if I could upload the Modular Thing code to the SAMD21 chip under the pretense that it was an Arduino Nano 33 IoT. That worked too! I even got a 'false' response from the button (in the bottom right corner). Aw yeah! Next, I soldered the necessary parts onto the breadboard Thing and connected it up to the LED test board that I made in Electronics Production week. I wrote a loop that blinks every pin on the IC and sends the corresponding pin number to the serial port. With this I was able to identify which pin in the Arduino Nano 33 IoT board definition applied to which pin on the IC itself. .md-content__button { display: none; }","title":"SimpleFOC"},{"location":"final-project/ideas.html","text":"Final Project I have three ideas for a final project: A robot arm, a wake-up mask and a digital stirring stick that tells me when to press down with the French press to get a perfect brew. Let's go through them one by one, with illustrations made in MS Paint. Idea 1: baks the robot arm I want to make a robot arm. I realize that this is not a particularly original idea, but I just dig robot arms. I want to call it baks, which is the noun version of the Icelandic verb baksa, which means to 'busy oneself' or to 'be industrious', usually in the context of toddlers playing energetically outside, digging and dragging logs about and throwing rocks. Here's a video where I describe the final project to Neil Gershenfeld during random review. A bit of history I backed the original uArm on Kickstarter and enjoyed using it until I accidentally connected the power supply directly to the Arduino instead to the uArm Shield. The plan was to have it fry doughnuts and frost them but the arm ended up getting fried instead. The software part also intimidated me. 
In one of the courses in my Mechanical Engineering degree, the task was to pick up a wine glass and give the professor a sip. The rationale behind it was to think about solutions for people with Parkinson's disease. My group put a lot of work into making a robot arm with a soft 3D printed hand that I found on Thingiverse. We converted an old theatre light (if you find one, they're a treasure trove, this one had 13 stepper motors). I designed and 3D printed all the custom components and the arm worked . But then I wanted an arm that can do more than one thing. When Fab Lab \u00cdsafj\u00f6r\u00f0ur needed something to impress the President of Iceland and the First Lady, Kar\u00edtas at Fab Lab Sau\u00f0\u00e1rkr\u00f3kur was kind enough to loan us a Niryo One , a 3D printed educational robot arm based on steppers and Dynamixel servos. I programmed the robot to hand the First Lady a laser cut business card , and had great fun experimenting with the robot. Then I fried one of its servo motors when a cable connector got stuck and the robot kept trying to move to its intended position. I managed to swap out the motor, but I thought that surely there's an improvement to be made here. Since educational robot arms seem to be mostly used to move colored cubes around, I think it's OK to scale them down and simplify them to make them less expensive and more accessible. I'd like to base my arm on brushless motors without any gearing, and use springs to balance it, just like the Anglepoise lamp . Then it's nimble and can't damage anything. It won't be able to lift much, but that's OK! I only want it to lift a paint brush or a colored cube. Outline of the idea Since I have a history of frying robot arms, I have a few personal design goals for my arm. Cannot hurt itself or the user Easily programmed by moving the arm and saving keypoints and paths Small Cheap Precise enough to paint candles The last point warrants a little explanation. 
Kertah\u00fasi\u00f0 in \u00cdsafj\u00f6r\u00f0ur manufacture candles in the shape of known buildings in the town and now around the country. It was started by a couple who were on parental leave but the money wasn't enough to live on. So they came to \u00de\u00f3rarinn at Fab Lab \u00cdsafj\u00f6r\u00f0ur and he helped them mold and cast the first candle houses. He was able to do this because of his experience at the Fab Academy. This was only last year, but their product offering now counts dozens of different designs. Every time they make a new design, the model is 3D printed here at Fab Lab \u00cdsafj\u00f6r\u00f0ur and S\u00e6d\u00eds and Gunnar make a silicone mold in their workshop. It's been a great collaboration. But hand painting the candles is a lot of work, so I wonder if it's possible to make a small, inexpensive and easy to use robot arm to paint the candles for them. Idea 2: Wake-up mask I live in \u00cdsafj\u00f6r\u00f0ur, and that means that when writing this, I have not seen the sun for two months. Last fall I found that waking up with my younger son became significantly harder as the sun came out later in the day. Then the sun disappeared altogether. The Seasonal Affective Disorder acronym seems apt: the mood is SAD. I want to get more light in the dark winter months, especially to wake me up naturally in the morning. I absolutely love the artificial sun by DIY Perks and I want to make one, but I'm not sure where to put it. Maybe in a big floor lamp similar to the Arco , but then the liquid coolant would need to be piped all the way between the stand and the light. I'll keep that one in my little black book of ideas for now. Since my wife feeds the baby during the night, I take him upstairs when he wakes up early in the morning. That way we can share the sleeplessness. But this means that I can't use a sunrise alarm clock, because I don't want to wake up my wife and my older boy. I wonder if a wake-up mask could work. 
I could take a sleeping mask and add a few LEDs that shine into my eyes for about ten minutes before my alarm sounds. Use a WiFi enabled microcontroller to sync the sunrise to the alarm clock on my phone. That's probably the most difficult part, but it would make it easy to use. Idea 3: pH pal for the French Press On my parental leave last fall, I wondered if I could use a pH meter to tell me exactly when to press down to make perfect French Press coffee every time. From experience (and reading online) I knew that with too short a brewing time the coffee becomes acidic and when you brew for too long, the coffee becomes bitter. So to get a baseline reference, every morning with my baby boy, I logged the quality of my morning cup. French press log started July 30 2022 Acidic: Balanced: Bitter: (I made the tally marks in Inkscape, by the way.) I found that the coffee grounds get more acidic as they age. As I don't have a coffee grinder, I buy ground coffee in bags that last for three to four weeks. In this experiment I found that freshly ground coffee only seems to be delicious for as long as a loaf of bread is: one day. Two days max. Those days I got great cups of coffee. So the result was not the one I wanted: Freshly ground coffee is much more important than pressing down at the right time. I also found that pourover or Vietnamese drip is makes a much better brew out of stale coffee grounds. Quite nice, actually. So there goes that idea, but I did find a source of low cost, high quality pH sensors and amplifiers. Believe me, this is cheap for what you get. To measure pH with the precision that you need to tell bitter coffee (pH 5.10) from acidic coffee (pH 4.85), your sensor amplifier needs to be isolated and well designed. Continuing the train of thought about coffee pH I also had the brilliant idea of putting a little bit of baking soda (which is basic) into the cup to 'fix' the coffee when it turned out acidic. I made the worst cup of coffee in history. 
A few people have encouraged me to make a prototype of the pH pal. Maybe later! .md-content__button { display: none; }","title":"Ideas"},{"location":"final-project/ideas.html#final-project","text":"I have three ideas for a final project: A robot arm, a wake-up mask and a digital stirring stick that tells me when to press down with the French press to get a perfect brew. Let's go through them one by one, with illustrations made in MS Paint.","title":"Final Project   "},{"location":"final-project/ideas.html#idea-1-baks-the-robot-arm","text":"I want to make a robot arm. I realize that this is not a particularly original idea, but I just dig robot arms. I want to call it baks, which is the noun version of the Icelandic verb baksa, which means to 'busy oneself' or to 'be industrious', usually in the context of toddlers playing energetically outside, digging and dragging logs about and throwing rocks. Here's a video where I describe the final project to Neil Gershenfeld during random review.","title":"Idea 1: baks the robot arm"},{"location":"final-project/ideas.html#a-bit-of-history","text":"I backed the original uArm on Kickstarter and enjoyed using it until I accidentally connected the power supply directly to the Arduino instead to the uArm Shield. The plan was to have it fry doughnuts and frost them but the arm ended up getting fried instead. The software part also intimidated me. In one of the courses in my Mechanical Engineering degree, the task was to pick up a wine glass and give the professor a sip. The rationale behind it was to think about solutions for people with Parkinson's disease. My group put a lot of work into making a robot arm with a soft 3D printed hand that I found on Thingiverse. We converted an old theatre light (if you find one, they're a treasure trove, this one had 13 stepper motors). I designed and 3D printed all the custom components and the arm worked . But then I wanted an arm that can do more than one thing. 
When Fab Lab \u00cdsafj\u00f6r\u00f0ur needed something to impress the President of Iceland and the First Lady, Kar\u00edtas at Fab Lab Sau\u00f0\u00e1rkr\u00f3kur was kind enough to loan us a Niryo One , a 3D printed educational robot arm based on steppers and Dynamixel servos. I programmed the robot to hand the First Lady a laser cut business card , and had great fun experimenting with the robot. Then I fried one of its servo motors when a cable connector got stuck and the robot kept trying to move to its intended position. I managed to swap out the motor, but I thought that surely there's an improvement to be made here. Since educational robot arms seem to be mostly used to move colored cubes around, I think it's OK to scale them down and simplify them to make them less expensive and more accessible. I'd like to base my arm on brushless motors without any gearing, and use springs to balance it, just like the Anglepoise lamp . Then it's nimble and can't damage anything. It won't be able to lift much, but that's OK! I only want it to lift a paint brush or a colored cube.","title":"A bit of history"},{"location":"final-project/ideas.html#outline-of-the-idea","text":"Since I have a history of frying robot arms, I have a few personal design goals for my arm. Cannot hurt itself or the user Easily programmed by moving the arm and saving keypoints and paths Small Cheap Precise enough to paint candles The last point warrants a little explanation. Kertah\u00fasi\u00f0 in \u00cdsafj\u00f6r\u00f0ur manufacture candles in the shape of known buildings in the town and now around the country. It was started by a couple who were on parental leave but the money wasn't enough to live on. So they came to \u00de\u00f3rarinn at Fab Lab \u00cdsafj\u00f6r\u00f0ur and he helped them mold and cast the first candle houses. He was able to do this because of his experience at the Fab Academy. This was only last year, but their product offering now counts dozens of different designs. 
Every time they make a new design, the model is 3D printed here at Fab Lab \u00cdsafj\u00f6r\u00f0ur and S\u00e6d\u00eds and Gunnar make a silicone mold in their workshop. It's been a great collaboration. But hand painting the candles is a lot of work, so I wonder if it's possible to make a small, inexpensive and easy to use robot arm to paint the candles for them.","title":"Outline of the idea"},{"location":"final-project/ideas.html#idea-2-wake-up-mask","text":"I live in \u00cdsafj\u00f6r\u00f0ur, and that means that when writing this, I have not seen the sun for two months. Last fall I found that waking up with my younger son became significantly harder as the sun came out later in the day. Then the sun disappeared altogether. The Seasonal Affective Disorder acronym seems apt: the mood is SAD. I want to get more light in the dark winter months, especially to wake me up naturally in the morning. I absolutely love the artificial sun by DIY Perks and I want to make one, but I'm not sure where to put it. Maybe in a big floor lamp similar to the Arco , but then the liquid coolant would need to be piped all the way between the stand and the light. I'll keep that one in my little black book of ideas for now. Since my wife feeds the baby during the night, I take him upstairs when he wakes up early in the morning. That way we can share the sleeplessness. But this means that I can't use a sunrise alarm clock, because I don't want to wake up my wife and my older boy. I wonder if a wake-up mask could work. I could take a sleeping mask and add a few LEDs that shine into my eyes for about ten minutes before my alarm sounds. Use a WiFi enabled microcontroller to sync the sunrise to the alarm clock on my phone. 
That's probably the most difficult part, but it would make it easy to use.","title":"Idea 2: Wake-up mask"},{"location":"final-project/ideas.html#idea-3-ph-pal-for-the-french-press","text":"On my parental leave last fall, I wondered if I could use a pH meter to tell me exactly when to press down to make perfect French Press coffee every time. From experience (and reading online) I knew that with too short a brewing time the coffee becomes acidic and when you brew for too long, the coffee becomes bitter. So to get a baseline reference, every morning with my baby boy, I logged the quality of my morning cup.","title":"Idea 3: pH pal for the French Press"},{"location":"final-project/ideas.html#french-press-log-started-july-30-2022","text":"Acidic: Balanced: Bitter: (I made the tally marks in Inkscape, by the way.) I found that the coffee grounds get more acidic as they age. As I don't have a coffee grinder, I buy ground coffee in bags that last for three to four weeks. In this experiment I found that freshly ground coffee only seems to be delicious for as long as a loaf of bread is: one day. Two days max. Those days I got great cups of coffee. So the result was not the one I wanted: Freshly ground coffee is much more important than pressing down at the right time. I also found that pourover or Vietnamese drip is makes a much better brew out of stale coffee grounds. Quite nice, actually. So there goes that idea, but I did find a source of low cost, high quality pH sensors and amplifiers. Believe me, this is cheap for what you get. To measure pH with the precision that you need to tell bitter coffee (pH 5.10) from acidic coffee (pH 4.85), your sensor amplifier needs to be isolated and well designed. Continuing the train of thought about coffee pH I also had the brilliant idea of putting a little bit of baking soda (which is basic) into the cup to 'fix' the coffee when it turned out acidic. I made the worst cup of coffee in history. 
A few people have encouraged me to make a prototype of the pH pal. Maybe later! .md-content__button { display: none; }","title":"French press log started July 30 2022"},{"location":"final-project/integration.html","text":"System integration USB hub communication I was unsure how to connect all the robot joints together for a while. Jake Read shows I2C communication using his OSAP library. It would make for a nicely integrated whole, but the joints that have I2C connections might react more slowly than the one in the base, which would be directly connected to the full-speed USB port. So two weeks before the final presentation I ordered a tiny USB hub from a Swiss company called Yoctopuce. When it arrived I was eager to see if it worked and whether it was able to supply enough current to two motors at the same time. So I took my bike over to Hamraborg, bought a USB-C cable, cut it in half and soldered the wires to connectors that I ordered with the USB hub. And here I am running two BLDC motors from one USB port on my laptop (note that this is a special 3A port with a battery charging logo on it): Now the whole thing works on a breadboard. Whew! Power budget A bit later I realized that I hadn't checked whether that single USB-C port on my laptop could support all the motors and things that I wanted to build into the arm. So I bought another USB-C cable, cut it in half and connected more motors. And here I have three brushless motors and one stepper motor running on one USB-C port at the same time: Instead of the stepper motor I'll actually be using a tiny DC motor as a Z-axis, and I also need to power an end effector. Assembling one robot joint The night before my final project presentation, I assembled one robot joint and managed to hide all the wires. Here it is running a PID cascade control loop using the SimpleFOC library: Phew, it works! 
.md-content__button { display: none; }","title":"System integration"},{"location":"final-project/integration.html#system-integration","text":"","title":"System integration   "},{"location":"final-project/integration.html#usb-hub-communication","text":"I was unsure how to connect all the robot joints together for a while. Jake Read shows I2C communication using his OSAP library. It would make for a nicely integrated whole, but the joints that have I2C connections might react more slowly than the one in the base, which would be directly connected to the full-speed USB port. So two weeks before the final presentation I ordered a tiny USB hub from a Swiss company called Yoctopuce. When it arrived I was eager to see if it worked and whether it was able to supply enough current to two motors at the same time. So I took my bike over to Hamraborg, bought a USB-C cable, cut it in half and soldered the wires to connectors that I ordered with the USB hub. And here I am running two BLDC motors from one USB port on my laptop (note that this is a special 3A port with a battery charging logo on it): Now the whole thing works on a breadboard. Whew!","title":"USB hub communication"},{"location":"final-project/integration.html#power-budget","text":"A bit later I realized that I hadn't checked whether that single USB-C port on my laptop could support all the motors and things that I wanted to build into the arm. So I bought another USB-C cable, cut it in half and connected more motors. And here I have three brushless motors and one stepper motor running on one USB-C port at the same time: Instead of the stepper motor I'll actually be using a tiny DC motor as a Z-axis, and I also need to power an end effector.","title":"Power budget"},{"location":"final-project/integration.html#assembling-one-robot-joint","text":"The night before my final project presentation, I assembled one robot joint and managed to hide all the wires. 
Here it is running a PID cascade control loop using the SimpleFOC library: Phew, it works! .md-content__button { display: none; }","title":"Assembling one robot joint"},{"location":"final-project/interface.html","text":"Interface My own thing I tried the PhysicalPixel example that is built into the Arduino IDE and modified it to send commands to the motor. In the video below I am controlling the motors with Python through two serial ports at the same time. The left motor rotates twice and the right motor rotates once. I found a fantastic example of using WebSerial in Py-Script and tried to run it locally. I got the error: Access to Image from origin 'null' has been blocked by CORS policy I found that this is because the code can only be run on a server. I tried to set up a local server using web.py but that didn't work and then I tried uploading the example to my website but the MkDocs system dindn't allow me to access the page. Then I found this tutorial on how to deploy a Py-Script app to GitHub Pages. That was easy, it's just a matter of creating a repository, uploading the files and going into settings and enabling deployment: The repository itself is under the Code tab. Go into the Settings tab, select Pages in the left menu and under Build and Deployment select main , root and click Save. As long as the repository has a file called index.html, the site is now live! Here's a link to the two motor control interface: baks Here I'm connecting to a COM device and sending it the command T12, which means 'Turn by 12 radians', or just about two rotations: It works! I couldn't be more excited! I'm getting close to having the full stack of technologies working. And the interface is live on the web! 
I made some very rough modifications to the code and managed to connect to two COM ports and control two motors: Modular Things When I had the rgbb board and the stepper board working, I managed to make them interact with the following code (I named the stepper after myself): Svavar . setCurrentScale ( 0.3 ); Svavar . setVelocity ( 200 ); Svavar . setAccel ( 40 ); var val = 0 ; loop ( async () => { val = await led . getButtonState ( 0 ); console . log ( val ); led . setRGB ( val , val , val ); let pos = 0 ; if ( val == true ){ for ( let i = 0 ; i < 2 ; i ++ ) { pos = pos == 0 ? 5 : 0 ; await Svavar . absolute ( pos ); } } }, 50 ); .md-content__button { display: none; }","title":"Interface"},{"location":"final-project/interface.html#interface","text":"","title":"Interface   "},{"location":"final-project/interface.html#my-own-thing","text":"I tried the PhysicalPixel example that is built into the Arduino IDE and modified it to send commands to the motor. In the video below I am controlling the motors with Python through two serial ports at the same time. The left motor rotates twice and the right motor rotates once. I found a fantastic example of using WebSerial in Py-Script and tried to run it locally. I got the error: Access to Image from origin 'null' has been blocked by CORS policy I found that this is because the code can only be run on a server. I tried to set up a local server using web.py but that didn't work and then I tried uploading the example to my website but the MkDocs system dindn't allow me to access the page. Then I found this tutorial on how to deploy a Py-Script app to GitHub Pages. That was easy, it's just a matter of creating a repository, uploading the files and going into settings and enabling deployment: The repository itself is under the Code tab. Go into the Settings tab, select Pages in the left menu and under Build and Deployment select main , root and click Save. 
As long as the repository has a file called index.html, the site is now live! Here's a link to the two motor control interface: baks Here I'm connecting to a COM device and sending it the command T12, which means 'Turn by 12 radians', or just about two rotations: It works! I couldn't be more excited! I'm getting close to having the full stack of technologies working. And the interface is live on the web! I made some very rough modifications to the code and managed to connect to two COM ports and control two motors:","title":"My own thing"},{"location":"final-project/interface.html#modular-things","text":"When I had the rgbb board and the stepper board working, I managed to make them interact with the following code (I named the stepper after myself): Svavar . setCurrentScale ( 0.3 ); Svavar . setVelocity ( 200 ); Svavar . setAccel ( 40 ); var val = 0 ; loop ( async () => { val = await led . getButtonState ( 0 ); console . log ( val ); led . setRGB ( val , val , val ); let pos = 0 ; if ( val == true ){ for ( let i = 0 ; i < 2 ; i ++ ) { pos = pos == 0 ? 5 : 0 ; await Svavar . absolute ( pos ); } } }, 50 ); .md-content__button { display: none; }","title":"Modular Things"},{"location":"final-project/motor_control.html","text":"Motor Control Here I'm giving the motor commands from the Arduino serial monitor. I'm using the SimpleFOC Commander interface, which is a set of commands similar to G-code. .md-content__button { display: none; }","title":"Motor Control"},{"location":"final-project/motor_control.html#motor-control","text":"Here I'm giving the motor commands from the Arduino serial monitor. I'm using the SimpleFOC Commander interface, which is a set of commands similar to G-code. .md-content__button { display: none; }","title":"Motor Control   "},{"location":"final-project/presentation.html","text":"Final Project Presentation Presentation slide. 1 minute presentation video. 
Here I am presenting my final project to Professor Neil Gershenfeld and the instructors and students in the 2023 Fab Academy cycle. Link to the video. .md-content__button { display: none; }","title":"Presentation"},{"location":"final-project/presentation.html#final-project-presentation","text":"Presentation slide. 1 minute presentation video. Here I am presenting my final project to Professor Neil Gershenfeld and the instructors and students in the 2023 Fab Academy cycle. Link to the video. .md-content__button { display: none; }","title":"Final Project Presentation   "}]}
\ No newline at end of file
diff --git a/public/sitemap.xml.gz b/public/sitemap.xml.gz
index 4d63bffde1a6ae7949ad44c96433467e8373e0f3..9f6b988f439347947e66479d18d98562e56baab1 100644
GIT binary patch
delta 14
Vcmcb~c$1M$zMF$X<mp7V%K#$H1lRxo

delta 14
Vcmcb~c$1M$zMF&N&*O<~mjNeD1-<|P

-- 
GitLab