HISE Logo Forum
    • Categories
    • Register
    • Login

    I wasted 3 hours on deepseek trying to create Autotune, Reverb and Delay in Hise

    Scheduled Pinned Locked Moved Scripting
    32 Posts 16 Posters 1.0k Views
    Loading More Posts
    • Oldest to Newest
    • Newest to Oldest
    • Most Votes
    Reply
    • Reply as topic
    Log in to reply
    This topic has been deleted. Only users with topic management privileges can see it.
    • huen97H
      huen97
      last edited by

      When I thought the compile was successful, I assumed everything was okay, but when I tested it, I found that it wasn't working.

      this is autotune

      Content.makeFrontInterface(500, 300);
      
      // ===== 1. Scale system =====
      // Scale data stored as an object: name -> semitone offsets from the root
      const var Scales = {
          "Chromatic": [0,1,2,3,4,5,6,7,8,9,10,11], // chromatic scale
          "Major":     [0,2,4,5,7,9,11],             // major scale
          "Minor":     [0,2,3,5,7,8,10]              // natural minor
      };
      
      // Current scale state (updated by the combo box callbacks below)
      reg currentScale = Scales.Chromatic;
      reg rootNote = 9; // default root note: A
      
      // ===== 2. Control creation =====
      const var scaleCombo = Content.addComboBox("scaleCombo", 50, 30);
      scaleCombo.set("items", "Chromatic, Major, Minor");  // note the space after each comma
      scaleCombo.set("defaultValue", 0);
      scaleCombo.set("tooltip", "Select musical scale");
      scaleCombo.set("height", 24);  // set the height explicitly
      
      // ===== 2. Root note combo box =====
      const var rootCombo = Content.addComboBox("rootCombo", 200, 30);
      rootCombo.set("items", "C, C#, D, D#, E, F, F#, G, G#, A, A#, B");  // space after each comma
      rootCombo.set("defaultValue", 9);
      rootCombo.set("tooltip", "Select root note");
      rootCombo.set("height", 24);
      
      // ===== 3. Correction strength knob =====
      const var strengthKnob = Content.addKnob("strength", 350, 30);
      Content.setPropertiesFromJSON("strength", {
          "min": 0,
          "max": 100,
          "stepSize": 1,
          "defaultValue": 80,
          "text": "Strength",
          "tooltip": "Pitch correction strength",
          "height": 30,
      });
      
      // Snap a detected frequency (Hz) to the nearest note of the current
      // scale, preserving the octave of the input.
      // Reads globals: rootNote (0-11, 9 = A), currentScale (semitone
      // offsets from the root). Returns the quantized frequency in Hz.
      function getQuantizedPitch(pitchInHz)
      {
          // Semitone offset from A440 via natural log (HISE has no Math.log2):
          // semitone = 12 * log(pitch / 440) / ln(2); 0.69314718056 ~ ln(2)
          var semitone = Math.round(12 * (Math.log(pitchInHz / 440.0) / 0.69314718056));
          
          // Pitch class relative to the selected root (0-11).
          // A440 is pitch class 9 (A), so add 9 before subtracting the root.
          var rel = ((semitone + 9 - rootNote) % 12 + 12) % 12;
          
          // Find the nearest scale degree, measuring distance with octave
          // wrap-around (e.g. distance between 11 and 0 is 1, not 11).
          var best = currentScale[0];
          var bestDist = 12;
          
          for (var i = 0; i < currentScale.length; i++)
          {
              var d = Math.abs(currentScale[i] - rel);
              d = Math.min(d, 12 - d);
              
              if (d < bestDist)
              {
                  bestDist = d;
                  best = currentScale[i];
              }
          }
          
          // Smallest signed step to the snapped pitch class; keeps the octave
          // (the original dropped the octave entirely).
          var delta = best - rel;
          if (delta > 6) delta -= 12;
          if (delta < -6) delta += 12;
          
          // Back to Hz: 0.05776226505 ~ ln(2) / 12.
          // Bug fix: the original called a bare Exp(), which is undefined in
          // HISE script -- Math.exp is the correct function.
          return 440.0 * Math.exp((semitone + delta) * 0.05776226505);
      }
      
      // ===== 4. Control callbacks =====
      inline function onScaleComboControl(component, value)
      {
          // Look up the scale by the combo box text (avoids passing the
          // control object around directly).
          // NOTE(review): "text" is normally the component's label property,
          // not the selected item -- verify against the HISE ScriptComboBox
          // API (e.g. getItemText()) that this returns the chosen entry.
          currentScale = Scales[scaleCombo.get("text")];
      }
      
      // Bind the callback (using an inline function)
      scaleCombo.setControlCallback(onScaleComboControl);
      
      // ===== 2. Root note selection callback =====
      inline function onRootComboControl(component, value)
      {
          rootNote = value; // use the value parameter directly
                            // NOTE(review): HISE combo box values are 1-based,
                            // so this is likely off by one from the intended
                            // 0-11 pitch class -- confirm
      }
      
      rootCombo.setControlCallback(onRootComboControl);
      
      // ===== 5. Audio processing block =====
      // NOTE(review): HISE interface scripts cannot do per-sample realtime
      // audio processing; this belongs in Scriptnode / a C++ node. Kept here,
      // but wired to the script-level detectPitch() defined further down so
      // the snippet is at least internally consistent (the original called an
      // undefined 'pitchDetector' object).
      function processBlock(data, numChannels, numSamples)
      {
          // Correction strength as 0..1
          var strength = strengthKnob.getValue() / 100;
          
          // Mono processing; channel 0 is the source
          for (var s = 0; s < numSamples; s++)
          {
              var inSample = data[0][s];
              
              // Pitch detection via the autocorrelation helper
              var detectedPitch = detectPitch(inSample);
              
              if (detectedPitch > 0)
              {
                  // Quantize the detected pitch to the current scale
                  var targetPitch = getQuantizedPitch(detectedPitch);
                  
                  // Naive "correction": crossfade between dry sample and a
                  // ratio-scaled copy (a real implementation needs an actual
                  // pitch shifter, e.g. Engine.createPitchShifter)
                  var pitchRatio = targetPitch / detectedPitch;
                  data[0][s] = inSample * (1 - strength) + (inSample * pitchRatio) * strength;
                  
                  // Copy the corrected sample to the right channel
                  if (numChannels > 1) data[1][s] = data[0][s];
              }
          }
      }
      
      // ===== 6. Initialization =====
      const var fftSize = 1024; // analysis window length in samples
      reg fftBuffer = [];       // sample accumulation buffer
      reg fftPosition = 0;      // current write position in the buffer
      reg lastPitch = 440;      // last detected pitch in Hz (returned between detections)
      
      // Zero-fill the analysis buffer
      for (var i = 0; i < fftSize; i++) {
          fftBuffer[i] = 0;
      }
      
      // ===== 2. Naive pitch detector =====
      // Accumulates samples into fftBuffer one call at a time. Each time a
      // full window has been collected, runs an autocorrelation search over
      // candidate lags and caches the winning pitch in lastPitch, which is
      // returned on every call.
      function detectPitch(sample) {
          fftBuffer[fftPosition] = sample;
          fftPosition = (fftPosition + 1) % fftSize;
          
          // Window complete -> run one detection pass
          if (fftPosition == 0) {
              var bestCorrelation = -1;
              var candidate = 0;
              
              // lag of 40 samples ~ 100 Hz lower bound at 44.1 kHz
              for (var lag = 40; lag < fftSize / 2; lag++) {
                  var acc = 0;
                  
                  for (var i = 0; i + lag < fftSize; i++)
                      acc += fftBuffer[i] * fftBuffer[i + lag];
                  
                  if (acc > bestCorrelation) {
                      bestCorrelation = acc;
                      candidate = Engine.getSampleRate() / lag;
                  }
              }
              
              // Only accept results in a plausible vocal range (80 Hz - 1 kHz)
              if (candidate > 80 && candidate < 1000)
                  lastPitch = candidate;
          }
          
          return lastPitch;
      }
      
      // ===== 3. Audio processing block =====
      // NOTE(review): this redefines processBlock() from section 5 above, so
      // only this version would be active. It only feeds the detector; the
      // audio passes through untouched.
      // Bug fix: the original 'for' had no braces, so only the first
      // statement ran inside the loop and a stray '}' ended the function.
      function processBlock(data, numChannels, numSamples)
      {
          for (var s = 0; s < numSamples; s++)
          {
              var inSample = data[0][s];
              
              // Pitch detection, one sample at a time
              // (replaces the undefined 'pitchDetector' object)
              var currentPitch = detectPitch(inSample);
          }
      }
      
      
      

      this is Reverb&Delay

      // ===== 1. Parameter definitions =====
      // NOTE(review): confirm ScriptSlider exposes setRange(min, max, step)
      // in the current HISE API; otherwise use set("min"/"max"/"stepSize").
      const var mixKnob = Content.addKnob("Mix", 0, 0);
      mixKnob.setRange(0, 1, 0.01);
      mixKnob.set("text", "Dry/Wet");
      mixKnob.set("defaultValue", 0.5);
      
      const var sizeKnob = Content.addKnob("Size", 200, 0);
      sizeKnob.setRange(0.1, 10, 0.1);
      sizeKnob.set("text", "Reverb Size");
      sizeKnob.set("defaultValue", 3);
      
      const var feedbackKnob = Content.addKnob("Feedback", 400, 0);
      feedbackKnob.setRange(0, 0.95, 0.01);  // capped below 1 so the delay feedback cannot run away
      feedbackKnob.set("text", "Delay Feedback");
      feedbackKnob.set("defaultValue", 0.5);
      
      // ===== 2. 延迟线实现 =====
      reg delayBufferL = []; // 左声道延迟缓冲区
      reg delayBufferR = []; // 右声道延迟缓冲区
      reg delayPos = 0;
      const delayMaxSamples = 44100; // 1秒缓冲区
      
      // 初始化缓冲区
      for (i = 0; i < delayMaxSamples; i++) {
          delayBufferL[i] = 0;
          delayBufferR[i] = 0;
      }
      
      // ===== 3. 混响参数 =====
      reg reverbBufferL = [];
      reg reverbBufferR = [];
      reg reverbPos = 0;
      const reverbTime = 0.2 * Engine.getSampleRate(); // 200ms混响
      
      for (i = 0; i < reverbTime; i++) {
          reverbBufferL[i] = 0;
          reverbBufferR[i] = 0;
      }
      
      // ===== 4. Audio processing =====
      // NOTE(review): HISE interface scripts cannot process audio per sample
      // in real time; this belongs in Scriptnode / a C++ node.
      // Bug fix: the original 'for' was missing its braces, so only the first
      // statement ran per sample and the entire effect ran once per block.
      function processBlock(data, numChannels, numSamples)
      {
          // Read the knobs once per block instead of once per sample
          var mix = mixKnob.getValue();
          var feedback = feedbackKnob.getValue();
          var size = sizeKnob.getValue();
          
          for (var s = 0; s < numSamples; s++)
          {
              // Input samples (fall back safely when channels are missing)
              var inL = numChannels > 0 ? data[0][s] : 0;
              var inR = numChannels > 1 ? data[1][s] : inL;
              
              // --- Delay: read the delayed sample, write input + feedback ---
              var delayedL = delayBufferL[delayPos];
              var delayedR = delayBufferR[delayPos];
              delayBufferL[delayPos] = inL + delayedL * feedback;
              delayBufferR[delayPos] = inR + delayedR * feedback;
              
              // --- Reverb: read the damped tail, write input + scaled tail ---
              var revL = reverbBufferL[reverbPos] * 0.6;
              var revR = reverbBufferR[reverbPos] * 0.6;
              reverbBufferL[reverbPos] = inL + revL * size;
              reverbBufferR[reverbPos] = inR + revR * size;
              
              // --- Dry/wet mix ---
              var wetL = (delayedL + revL) * mix;
              var wetR = (delayedR + revR) * mix;
              
              if (numChannels > 0) data[0][s] = inL * (1 - mix) + wetL;
              if (numChannels > 1) data[1][s] = inR * (1 - mix) + wetR;
              
              // Advance the circular-buffer positions
              delayPos = (delayPos + 1) % delayMaxSamples;
              reverbPos = (reverbPos + 1) % reverbTime;
          }
      }
      
      ustkU griffinboyG 2 Replies Last reply Reply Quote -1
      • ustkU
        ustk @huen97
        last edited by

        @huen97 You cannot process audio in the script interface...
        The process functions should be in C++ third party node, within Scriptnode.

        I don't think DeepSeek or any other LLM will produce ready-to-paste working code for you, though they are great for learning how things kinda work.

        So seeing what you want to achieve shows you need to learn more about how Hise actually works. Then you'll be able to make things step by step, between the interface script, other scripts, modules, Scriptnode, and Third party C++ nodes (or other Faust/RNBO and consort).

        Can't help pressing F5 in the forum...

        Christoph HartC 1 Reply Last reply Reply Quote 3
        • Christoph HartC
          Christoph Hart @ustk
          last edited by

          You didn‘t waste three hours. You learned a valuable lesson about the limits of AI and why everybody makes fun of vibe coding.

          CatABCC 1 Reply Last reply Reply Quote 7
          • griffinboyG
            griffinboy @huen97
            last edited by

            @huen97

            Ai will happily gaslight you.
            It'll tell you all sorts of lies and it will often be unable to acknowledge its own mistakes.

            You can't program using AI without understanding deeply what you are trying to make, and at least how it works on a basic level.

            I use AI heavily in my coding, but I always understand the algorithms I am making. The AI is simply there because it knows c++ syntax better than me.
            But it will never be able to write an algorithm as good as the one in my own head. There are too many elements and moving parts that go into making DSP, an LLM is unlikely to know all the info required. And there is DSP that is still way out of reach, even if you give it all the info and math it needs it will still fail at things. Ai is particularly bad at audio programming I've found.

            1 Reply Last reply Reply Quote 2
            • CatABCC
              CatABC @Christoph Hart
              last edited by

              @Christoph-Hart 😹 😹 😹 I agree

              //I am a new student at HISE._
              //Windows11 & MacOS Ventura 13.6.7_

              1 Reply Last reply Reply Quote 0
              • CatABCC
                CatABC
                last edited by

                HISE is very friendly to beginners. You can start with

                Console.print("Hello! HISE");
                

                . Believe me, you will soon gain something instead of relying on AI.

                //I am a new student at HISE._
                //Windows11 & MacOS Ventura 13.6.7_

                d.healeyD 1 Reply Last reply Reply Quote 1
                • d.healeyD
                  d.healey @CatABC
                  last edited by d.healey

                  @CatABC You're right. I just asked ChatGPT for a hello world example for HISE and the output is nonsense - I won't post it here because I don't want to pollute the forum. But it would absolutely mislead a beginner.

                  Libre Wave - Freedom respecting instruments and effects
                  My Patreon - HISE tutorials
                  YouTube Channel - Public HISE tutorials

                  A 1 Reply Last reply Reply Quote 2
                  • dannytaurusD
                    dannytaurus
                    last edited by

                    There simply isn't enough HISE-specific content out there for AI to make a decent job of it.

                    I use Cursor (AI-enabled fork of Visual Studio Code) in my daily Ruby coding and the AI does an excellent job on small, focussed tasks. It's doing stuff I could do myself but about 20x faster.

                    They just released a Background Agent that you can use for larger, more complex tasks which I'm looking forward to trying. Prompt it with details of a new feature implementation and let it get to work planning and executing, all with human oversight on the process. Much like Devin, but less expensive.

                    Anyway, the point is, to do a good job AI at the very least needs A LOT of source material to work with. And there isn't enough HISE out there for it to do that.

                    Meat Beats: https://meatbeats.com
                    Klippr Video: https://klippr.video

                    1 Reply Last reply Reply Quote 0
                    • A
                      aaronventure @d.healey
                      last edited by

                      @d-healey That's why you should ask Claude with the Context7 MCP installed

                      // ========================================
                      // HISE Hello World Script Examples
                      // ========================================
                      
                      // 1. BASIC CONSOLE OUTPUT
                      // The simplest way to output "Hello World" in HISE
                      Console.print("Hello World");
                      
                      // 2. CREATING A BASIC USER INTERFACE
                      // Create the main interface (600x500 pixels)
                      Content.makeFrontInterface(600, 500);
                      
                      // Add a button programmatically
                      const var HelloButton = Content.addButton("HelloButton", 10, 10);
                      HelloButton.set("text", "Click for Hello World!");
                      
                      // 3. BUTTON CALLBACK FUNCTION
                      // Define what happens when the button is clicked
                      inline function onHelloButtonControl(component, value)
                      {
                          if(value) // Button was pressed (value = 1)
                          {
                              Console.print("Hello World from Button!");
                              
                              // You can also show a popup message
                              // NOTE(review): confirm Engine.showMessageBox exists with
                              // this three-argument signature in the current HISE API
                              Engine.showMessageBox("Hello World", "Greetings from HISE!", 0);
                          }
                      };
                      
                      // Connect the callback to the button
                      HelloButton.setControlCallback(onHelloButtonControl);
                      
                      // 4. DRAWING TEXT ON A PANEL
                      // Add a panel for custom graphics
                      const var HelloPanel = Content.addPanel("HelloPanel", 10, 70);
                      HelloPanel.set("width", 300);
                      HelloPanel.set("height", 100);
                      
                      // Set a custom paint routine for the panel (runs whenever the
                      // panel repaints; 'g' is the graphics context)
                      HelloPanel.setPaintRoutine(function(g)
                      {
                          // Set background color
                          g.setColour(Colours.withAlpha(Colours.blue, 0.3));
                          g.fillRect(this.getLocalBounds(0));
                          
                          // Set text properties
                          g.setFont("Arial", 24);
                          g.setColour(Colours.white);
                          
                          // Draw the hello world text
                          g.drawFittedText("Hello World!", this.getLocalBounds(5), "centred", 1, 1.0);
                      });
                      
                      // 5. CREATING A SIMPLE SOUND GENERATOR
                      // Get reference to a sine wave generator (if one exists in your project)
                      // Note: You need to add a SineWaveGenerator module first in HISE
                      /*
                      const var SineGen = Synth.getChildSynth("Sine Wave Generator");
                      
                      if(isDefined(SineGen))
                      {
                          Console.print("Sine generator found - Hello World with sound!");
                          
                          // Set some basic parameters
                          SineGen.setAttribute(SineGen.SaturationAmount, 0.1);
                          Console.print("Saturation set to: " + SineGen.getAttribute(SineGen.SaturationAmount));
                      }
                      else
                      {
                          Console.print("No sine generator found - add one to test sound generation");
                      }
                      */
                      
                      // 6. WORKING WITH ARRAYS AND LOOPS
                      Console.print("=== Array Hello World ===");
                      const var words = ["Hello", "World", "from", "HISE"];
                      
                      // NOTE(review): 'i' is never declared (no var/reg) -- declare the
                      // loop counter explicitly; HISE may reject an implicit global here
                      for(i = 0; i < words.length; i++)
                      {
                          Console.print("Word " + i + ": " + words[i]);
                      }
                      
                      // Join the words together (HISE for...in iterates values)
                      var message = "";
                      for(word in words)
                      {
                          message += word + " ";
                      }
                      Console.print("Complete message: " + message);
                      
                      // 7. BASIC MATH OPERATIONS
                      Console.print("=== Math Hello World ===");
                      const var x = 5;
                      const var y = 10;
                      const var result = x + y;
                      Console.print("Math result: " + x + " + " + y + " = " + result);
                      
                      // 8. TIMER-BASED HELLO WORLD
                      // Create a timer that prints hello world every 2 seconds
                      const var HelloTimer = Engine.createTimerObject();
                      HelloTimer.setTimerCallback(function()
                      {
                          Console.print("Timer says: Hello World! " + Engine.getUptime());
                      });
                      
                      // Start the timer (2000ms = 2 seconds)
                      HelloTimer.startTimer(2000);
                      
                      // 9. BASIC FUNCTION DEFINITION
                      // Returns a greeting string built from the given name
                      inline function sayHello(name)
                      {
                          return "Hello " + name + " from HISE!";
                      }
                      
                      // Call the function
                      Console.print(sayHello("Developer"));
                      Console.print(sayHello("World"));
                      
                      // 10. BASIC CONDITIONAL LOGIC
                      // Engine.getUptime() reports seconds since the engine started
                      const var currentTime = Engine.getUptime();
                      if(currentTime > 5.0)
                      {
                          Console.print("Hello World - HISE has been running for more than 5 seconds!");
                      }
                      else
                      {
                          Console.print("Hello World - HISE just started!");
                      }
                      
                      Console.print("=== HISE Hello World Script Complete! ===");
                      
                      d.healeyD 1 Reply Last reply Reply Quote 1
                      • d.healeyD
                        d.healey @aaronventure
                        last edited by

                        @aaronventure said in I wasted 3 hours on deepseek trying to create Autotune, Reverb and Delay in Hise:

                        That's why you should ask Claude

                        For us it's useful, but a beginner shouldn't use it for learning scripting because they don't know if the output is good or bad. I can already pick holes in that Claude output but a beginner wouldn't know.

                        Libre Wave - Freedom respecting instruments and effects
                        My Patreon - HISE tutorials
                        YouTube Channel - Public HISE tutorials

                        A 1 Reply Last reply Reply Quote 0
                        • ChazroxC
                          Chazrox
                          last edited by

                          Screenshot 2025-06-07 at 2.38.39 PM.png

                          1 Reply Last reply Reply Quote 1
                          • A
                            Allen
                            last edited by

                            AI is currently really bad at writing DSP code.

                            the reverb&delay code really looks like something from r/programminghorror btw
                            I can already hear the pop and clicks followed by a loud-ass feedback lol.

                            StraticahS 1 Reply Last reply Reply Quote 0
                            • StraticahS
                              Straticah @Allen
                              last edited by

                              Hise Forum is the best meme page by far.

                              building user interfaces in HISE :)
                              web: www.vst-design.com

                              1 Reply Last reply Reply Quote 2
                              • A
                                aaronventure @d.healey
                                last edited by

                                @d-healey yeah you're right

                                plus as I've said elsewhere, large part of HISE is not code-based

                                No way through HISE currently other than the hard way. which may become increasingly inaccessible as people become super impatient with AI doing their shit for them in seconds in other areas of life. I do often think how impossible it will be for us to relate to the new kids, given how we had to go and read 300 pages of docs before writing some code back in the day.

                                Now it's all just a prompt away.

                                ChazroxC 1 Reply Last reply Reply Quote 2
                                • ChazroxC
                                  Chazrox @aaronventure
                                  last edited by

                                  @aaronventure

                                  You're a Picasso worrying about how to relate to people who call stuff like this art...

                                  5L893dV.jpg

                                  lol

                                  ask them to explain the logic? There is none. Its 'vibe'. I myself appreciate the learning curve and the mid entry level requirements to get something decent done. I can already imagine the MESS ai is gonna make of everything before it makes things better.

                                  Who will maintain all this 💩 code??? lol

                                  d.healeyD A 2 Replies Last reply Reply Quote 0
                                  • d.healeyD
                                    d.healey @Chazrox
                                    last edited by

                                    @Chazrox You'll like this

                                    Libre Wave - Freedom respecting instruments and effects
                                    My Patreon - HISE tutorials
                                    YouTube Channel - Public HISE tutorials

                                    ChazroxC 1 Reply Last reply Reply Quote 0
                                    • ChazroxC
                                      Chazrox @d.healey
                                      last edited by Chazrox

                                      @d-healey

                                      💀 💀 💀
                                      Screenshot 2025-06-08 at 12.43.56 PM.png
                                      They tried it. lol

                                      I love how lazy people are.

                                      1 Reply Last reply Reply Quote 1
                                      • A
                                        aaronventure @Chazrox
                                        last edited by

                                        @Chazrox said in I wasted 3 hours on deepseek trying to create Autotune, Reverb and Delay in Hise:

                                        Who will maintain all this 💩 code??? lol

                                        The point is that you shouldn't look at it. Does it work? Is the performance acceptable? Have you tested for security issues? Just ship it. That's the current way of things. Some of the stuff I'm seeing in new repos popping up is the stuff of nightmares, but hey, it works.

                                        I mean it's incredible, it's literal science fiction stuff, you write an instruction as if you were writing it to a developer on your team, and it becomes reality. If you told me 3 years ago this would be a reality today I would've called you crazy. Of course, currently it's just step by step, so you still need to have the big picture, understand on high level how things work so your instructions can be accurate etc.

                                        But code developed both to be functional and to be readable to humans so they can write it and debug it. This is no longer necessary, so I have no trouble imagining a language coming up in a very near future that is completely oriented to be written by LLMs.

                                        The potential is just too much to ignore.

                                        ChazroxC 1 Reply Last reply Reply Quote 3
                                        • ChazroxC
                                          Chazrox @aaronventure
                                          last edited by

                                          @aaronventure I agree with the potential implications. We're just not there yet obviously and i'll probably make fun of it til then...

                                          Best believe when ai can figure out how to properly write an Auto-Tune in HISE, WE are ALL gonna go crazy and i'll be first in line!

                                          Until then, we're asking an infant to do gymnastics. <-- just more jokes. lol.

                                          I do use ai myself but only for extending index libraries and/or help with math. Other than that, it just makes a mess without proper (in-depth) context.

                                          Based on your experience, what are your thoughts on best use cases as it stands today?
                                          ...pertaining to HISE of course.

                                          A 1 Reply Last reply Reply Quote 0
                                          • A
                                            aaronventure @Chazrox
                                            last edited by

                                            @Chazrox Writing small pieces of code, like functions. Connect it to context7 so it can get docs, but obviously that only works if the docs are complete, and HISE docs are not.

                                            I did once write a full 3d rendering engine in HISE PaintRoutine, with frustum culling and all the jazz. You'd just pass it a "mesh" like an array of points relative to a center, pass it a location for the mesh center, pass it camera specs like xyz, pitch, yaw, fov and it would render the whole scene using line calls. Of course it was all running on the CPU so it wasn't very efficient at all.

                                            I did eventually move to WebView and three.js because that's what three is made for and it runs on the GPU, but this is the kind of stuff where even testing it out meant you had to go and learn computer graphics from complete zero just to implement this here, but the toughest question would be where to even look and what to look for. With AI it was much faster to find the relevant information and get it going.

                                            So in a world where a question "is this even viable to try to implement" can be answered in days instead of months, it's like magic wand.

                                            ChazroxC VirtualVirginV 2 Replies Last reply Reply Quote 2
                                            • First post
                                              Last post

                                            22

                                            Online

                                            1.7k

                                            Users

                                            11.9k

                                            Topics

                                            103.7k

                                            Posts