SOURCE

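// Demo script: builds a small composition with the editor API (a background image,
// a foreground image clip, several shader-based effects on effect tracks) and wires
// up playback controls and graph/timeline visualisations at the bottom.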
let fontData, textAnimationData;
let type = "flower"; // options: transition, effect, text, flower, animation
let canvas = document.getElementById('canvas');
let editor = new Editor(canvas);
let wordArt, wordArt1;
let fontNum = 0;
let animation1, animation2, animation3, textType, stickerNode, textNode, sticker_url, text_url;


let audioNode = {};
let audioTrack = {};
let effectNode = {};
let effectTrack = {};
let materialNode = {};
let videoTrack = {};
let transitionNode = {};
let extraNode = {};

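// Custom "tdouyin" glitch effect: the fragment shader samples the red and blue
// channels at opposite offsets (controlled by the "amp" uniform) while progress
// runs from 0 to 1, producing an RGB-split shake.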
effectNode['j--rm7uBXJUAuna8RQGjLzG'] = editor.effect({
    "title": "tdouyin",
    "description": "Change images to a single chroma (e.g can be used to make a black & white filter). Input color mix and output color mix can be adjusted.",
    "vertexShader": "#define GLSLIFY 1\nattribute vec2 a_position;\nattribute vec2 a_texCoord;\nvarying vec2 v_texCoord;\n\nvoid main() {\n  gl_Position = vec4(a_position, 0.0, 1.0);\n  v_texCoord = a_texCoord;\n}",
    "fragmentShader": "precision mediump float;\n#define GLSLIFY 1\nuniform sampler2D u_image;\nuniform float amp;\nuniform float progress;\nvarying vec2 v_texCoord;\n\nvec4 getFromColor(vec2 v_texCoord) {\n  return texture2D(u_image, v_texCoord);\n}\n\nfloat random (vec2 st) {\n    return fract(sin(dot(st.xy,vec2(12.9898,78.233)))*43758.5453123);\n}\n\nvec4 transition(vec2 uv) {\n  vec2 tmp;\n\n  if (progress <= 0.0 || progress >= 1.0) {\n      return getFromColor(uv);\n  }\n\n  if (progress < 0.2) {\n    tmp = vec2(amp*(0.1-abs(progress-0.1)), amp*(0.1-abs(progress-0.1)));\n  }else if (progress < 0.4) {\n    tmp = vec2(-amp*(0.1-abs(progress-0.3)), -amp*(0.1-abs(progress-0.3)));\n  }else if (progress < 0.6) {\n    tmp = vec2(amp*(0.1-abs(progress-0.5)), -amp*(0.1-abs(progress-0.5)));\n  }else if (progress < 0.8){\n    tmp = vec2(-amp*(0.1-abs(progress-0.7)), amp*(0.1-abs(progress-0.7)));\n  }else{\n    tmp = vec2(amp*(0.1-abs(progress-0.9)), amp*(0.1-abs(progress-0.9)));\n  }\n  vec4 texFrom= vec4(\n      getFromColor(uv - tmp).r,\n      getFromColor(uv).g,\n      getFromColor(uv + tmp).b,\n      1.0);\n  return texFrom;\n}\n\nvoid main() {\n  gl_FragColor = transition(v_texCoord);\n}\n",
    "inputs": [
        "u_image"
    ],
    "properties": {
        "amp": {
            "type": "uniform",
            "value": 0.2
        }
    }
})

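// The two arguments to effect() appear to be the start and stop times (in seconds)
// during which the effect is applied.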
effectNode['j--rm7uBXJUAuna8RQGjLzG'].effect(0.18, 3.18)
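// Foreground image clip: 720x540, positioned at (0, 370), shown from 0s to 3s.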
materialNode['7-s1AOYeIpVBWFdpi1kVbfq'] = editor.image('https://p9-tada-sign.byteimg.com/tos-cn-i-lqsqysnsqa/5f57f73a8a604116afb324892f39e326~tplv-lqsqysnsqa-image.jpeg?x-expires=1634022458&x-signature=cs5oW8h7IZaePzAx6jzn5bchfsU%3D', {
    "width": 720,
    "height": 540,
    "left": 0,
    "top": 370,
    "useSourceResolution": false
})
materialNode['7-s1AOYeIpVBWFdpi1kVbfq'].timelineStart(0)
materialNode['7-s1AOYeIpVBWFdpi1kVbfq'].timelineStop(3)
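// Full-frame background image (720x1280) on its own video track, rendered behind
// everything else via zIndex: -1.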
let bgTrack = editor.track({ type: 'video', zIndex: -1 });
let bgNode = editor.image('https://lf3-static.bytednsdoc.com/obj/eden-cn/4201eh7nuhpephbozupq/creative-video-editor/vertical.jpg', {
    left: 0,
    top: 0,
    width: 720,
    height: 1280,
    zIndex: -1,
});
bgNode.timelineStart(0);
bgNode.timelineStop(3.18);
bgTrack.addNode({ node: bgNode });

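// Put the foreground clip on a top-most video track, attach the tdouyin effect via a
// dedicated effect track, then remove that effect again a few lines further down.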
videoTrack['8-s1AOYeIpVBWFdpi1kVbfq'] = editor.track({ type: 'video', zIndex: 99 })
videoTrack['8-s1AOYeIpVBWFdpi1kVbfq'].addNode({ node: materialNode['7-s1AOYeIpVBWFdpi1kVbfq'] })
effectTrack['k--rm7uBXJUAuna8RQGjLzG'] = editor.track({ type: 'effect' })
effectTrack['k--rm7uBXJUAuna8RQGjLzG'].addEffect({
    effect: effectNode['j--rm7uBXJUAuna8RQGjLzG'],
    secondaryNode: [],
})
materialNode['7-s1AOYeIpVBWFdpi1kVbfq'].timelineStart(0)
materialNode['7-s1AOYeIpVBWFdpi1kVbfq'].timelineStop(3)
effectNode['j--rm7uBXJUAuna8RQGjLzG'].effect(0.18, 3.18)
effectTrack['k--rm7uBXJUAuna8RQGjLzG'].removeEffect({
    effect: effectNode['j--rm7uBXJUAuna8RQGjLzG'],
    secondaryNode: [],
})
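// "lut" color-grading effect: the shader reads a 512x512 lookup texture laid out as an
// 8x8 grid of 64x64 slices, maps each source pixel through it, and blends the result
// with the original color by the "power" uniform.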
effectNode['7-bCQnMa30jwWG9Uvq2xpCa'] = editor.effect({
    "title": "lut",
    "vertexShader": "#define GLSLIFY 1\nattribute vec2 a_position;\nattribute vec2 a_texCoord;\nvarying vec2 v_texCoord;\n\nvoid main() {\n    gl_Position = vec4(a_position, 0.0, 1.0);\n    v_texCoord = a_texCoord;\n}\n",
    "fragmentShader": "precision mediump float;\n#define GLSLIFY 1\n\nuniform float power;\nuniform sampler2D from;\nuniform sampler2D lut;\n\nvarying vec2 v_texCoord;\n\nvec4 transition(vec2 invUv){\n    vec2 useUv = vec2(invUv.x, invUv.y);\n\n    vec4 textureColor = texture2D(from, useUv);\n\n    float blueColor = textureColor.b * 63.0;\n\n    vec2 quad1;\n    quad1.y = floor(floor(blueColor) / 8.0);\n    quad1.x = floor(blueColor) - (quad1.y * 8.0);\n\n    vec2 quad2;\n    quad2.y = floor(ceil(blueColor) / 8.0);\n    quad2.x = ceil(blueColor) - (quad2.y * 8.0);\n\n    vec2 texPos1;\n    texPos1.x = (quad1.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);\n    texPos1.y = 1. - ((quad1.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g));\n\n    vec2 texPos2;\n    texPos2.x = (quad2.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);\n    texPos2.y = 1. - ((quad2.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g));\n\n    vec4 newColor1 = texture2D(lut, texPos1);\n    vec4 newColor2 = texture2D(lut, texPos2);\n\n    vec4 newColor = mix(newColor1, newColor2, fract(blueColor));\n    return mix(textureColor, vec4(newColor.rgb, textureColor.w), power);\n}\n\nvoid main() {\n    gl_FragColor = transition(v_texCoord);\n}\n\n",
    "inputs": [
        "from",
        "lut"
    ],
    "properties": {
        "power": {
            "type": "uniform",
            "value": 1
        }
    }
})
effectNode['7-bCQnMa30jwWG9Uvq2xpCa'].power = 1
effectNode['7-bCQnMa30jwWG9Uvq2xpCa'].effect(0, 3)
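// The LUT texture itself is loaded as an extra image node and handed to the effect
// as a secondary input when it is added to its effect track below.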
extraNode['cik1rp7clpi'] = editor.image('https://sf1-ttcdn-tos.pstatp.com/obj/ad-material-store/image/de766d2227c0ac2963590ed14c9af5ca')
extraNode['cik1rp7clpi'].timelineStart(0)
extraNode['cik1rp7clpi'].timelineStop(3)
effectTrack['8-bCQnMa30jwWG9Uvq2xpCa'] = editor.track({ type: 'effect' })
effectTrack['8-bCQnMa30jwWG9Uvq2xpCa'].addEffect({
    effect: effectNode['7-bCQnMa30jwWG9Uvq2xpCa'],
    secondaryNode: [extraNode['cik1rp7clpi']],
})
effectNode['7-bCQnMa30jwWG9Uvq2xpCa'].effect(0, 3)
extraNode['cik1rp7clpi'].timelineStart(0)
extraNode['cik1rp7clpi'].timelineStop(3)
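// "animationLeftRightWiggle" entrance animation: a damped-spring ease scales the frame
// from 1.3x down to 1x while sliding it in horizontally, with a slight directional blur
// driven by the same easing curve.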
effectNode['a-bCQnMa30jwWG9Uvq2xpCa'] = editor.effect({
    "title": "animationLeftRightWiggle",
    "vertexShader": "#define GLSLIFY 1\nattribute vec2 a_position;\nattribute vec2 a_texCoord;\nvarying vec2 v_texCoord;\n    \nvoid main() {\n  gl_Position = vec4(a_position, 0.0, 1.0);\n  v_texCoord = a_texCoord;\n}",
    "fragmentShader": "precision mediump float;\n#define GLSLIFY 1\n\nuniform float progress;\nuniform sampler2D u_image;\nvarying vec2 v_texCoord;\n\nvec4 getFromColor(vec2 v_texCoord) {\n    return texture2D(u_image, v_texCoord);\n}\n\nconst float blurStep=0.5;\nconst vec2 blurDirection = vec2(0.,0.);\nconst int num = 10;\nconst float PI = 3.14159265;\n\nfloat easeInOutCubic(float x)\n{\n    return x < 0.5 ? 4.0 * x * x * x : 1.0 - pow( -2.0 * x + 2.0, 3.0 ) / 2.0;\n}\n\nvec2 directionMove(vec2 videoImageCoord,vec2 resolution,vec2 directionOfMotion,float speed, float time)\n{\n    videoImageCoord = videoImageCoord + directionOfMotion * speed * time;\n    return videoImageCoord;\n}\n\nfloat funcEaseBlurAction(float t) {\n    return (-7. * exp(-7.0 * t) * 1.0 * sin((t - 0.075) * (2.0*PI) / 0.3) + exp(-7.0 * t) * 1.0 * cos((t - 0.075) * (2.0*PI) / 0.3) * 2.0 * PI / 0.3) * 0.1;\n}\n\nvec3 directionBlur(vec2 resolution,vec2 uv,vec2 directionOfBlur,float intensity, float p)\n{\n    vec2 pixelStep = 1.0/resolution * blurStep * funcEaseBlurAction(p) * intensity;\n    float dircLength = length(directionOfBlur);\n    if(dircLength > 0.01)\n    {\n        pixelStep.x = directionOfBlur.x * 1.0 / dircLength * pixelStep.x;\n        pixelStep.y = directionOfBlur.y * 1.0 / dircLength * pixelStep.y;\n    }\n\n    vec3 color = vec3(0);\n    for(int i = -num; i <= num; i++)\n    {\n        vec2 blurCoord = uv + pixelStep * float(i) ;\n        blurCoord = abs(blurCoord);\n        if(blurCoord.x > 1.0)\n        blurCoord.x = 1.0 - fract(blurCoord.x);\n        if(blurCoord.y > 1.0)\n        blurCoord.y = 1.0 - fract(blurCoord.y);\n\n        color += getFromColor(blurCoord).rgb;\n    }\n    color /= float(2 * num + 1);\n    return color;\n}\n\nfloat funcEaseAction(float t) {\n    if(t> 0.0 && t< 1.0) {\n        return exp(-7.0 * t) * 1.0 * sin((t - 0.075) * (2.0* PI) / 0.3) + 1.0;\n    }\n    return t;\n}\n\nvec2 getXYOffset( vec2 fromOffset, vec2 toOffset, float prog) {\n    vec2 _offset = vec2(.0,.0);\n    float _p =1. -funcEaseAction(prog);\n    float offset_x =(toOffset.x - fromOffset.x) * _p;\n    float offset_y = (toOffset.y - fromOffset.y) * _p;\n    return vec2(offset_x,offset_y);\n}\n\nfloat getRotateOffset(float fromRotate, float toRotate, float prog) {\n    float p = 1.-funcEaseAction(prog);\n    float rotateDeg = (toRotate - fromRotate) * p;\n    return rotateDeg;\n}\n\nvec2 rotateXY(vec2 pos, float deg) {\n    float radian = PI/180. * deg;\n    float x = pos.x*cos(radian) - pos.y*sin(radian);\n    float y = pos.x*sin(radian) + pos.y*cos(radian);\n    return vec2(x,y);\n}\n\nvec2 getScaleOffset(vec2 fromScale, vec2 toScale, float p) {\n    float _p = funcEaseAction(p);\n    float scale_x = fromScale.x + (toScale.x - fromScale.x) * _p;\n    float scale_y = fromScale.y + (toScale.y - fromScale.y) * _p;\n    return vec2(scale_x, scale_y);\n}\n\nvec4 transition(vec2 uv)\n{\n    if (progress <= 0.0) {\n        return getFromColor(uv);\n    } else if (progress >= 1.0) {\n        return getFromColor(uv);\n    }\n    vec2 resolution = vec2(200.,200.);\n    vec2 rotateCenter = uv * 0.5;\n    vec2 realCoord = uv * resolution;\n\n    vec3 resultColor = vec3(0.0);\n    float speed = 2.0;\n    realCoord = directionMove(realCoord,resolution,blurDirection,speed,0.0);\n    uv = uv - 0.5;\n    uv = uv / getScaleOffset(vec2(1.3,1.3),vec2(1.,1.), progress);\n    uv = uv + getXYOffset(vec2(.6, .0), vec2(.0,.0),progress);\n    uv = uv + 0.5;\n    vec2 texUv = uv;\n    resultColor = directionBlur(resolution,texUv,blurDirection, 1.0, progress);\n\n    if(uv.x >= 0. && uv.x <=1. && uv.y >=0. && uv.y <=1.) {\n        return vec4(resultColor,1.0);\n    }else {\n        return vec4(.0,.0,.0,1.);\n    }\n}\n\nvoid main() {\n  gl_FragColor = transition(v_texCoord);\n}\n",
    "inputs": [
        "u_image"
    ]
})
effectNode['a-bCQnMa30jwWG9Uvq2xpCa'].effect(0, 3)
effectTrack['b-bCQnMa30jwWG9Uvq2xpCa'] = editor.track({ type: 'effect' })
effectTrack['b-bCQnMa30jwWG9Uvq2xpCa'].addEffect({
    effect: effectNode['a-bCQnMa30jwWG9Uvq2xpCa'],
    secondaryNode: [],
})
effectNode['a-bCQnMa30jwWG9Uvq2xpCa'].effect(0, 3)
effectTrack['8-bCQnMa30jwWG9Uvq2xpCa'].removeEffect({
    effect: effectNode['7-bCQnMa30jwWG9Uvq2xpCa'],
    secondaryNode: [extraNode['cik1rp7clpi']],
})
effectTrack['8-bCQnMa30jwWG9Uvq2xpCa'].destroy()

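// UI wiring: a (currently disabled) node-removal handler plus playback controls and
// visualisations.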
initEvent();
initEditor();

function initEvent() {
    let removeNodeDom = document.getElementById('removeNode');
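    // The handler below is left commented out; it sketches how an audio node could be
    // created, scheduled on the timeline, and added to an audio track on button click.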
    // removeNodeDom.onclick = () => {
    //     audioNode['a-THmwCZf0cMOmWUGVOmZ9H'] = editor.audio('https://lf3-static.bytednsdoc.com/obj/eden-cn/4201eh7nuhpephbozupq/creative-video-editor/1234.m4a', {
    //         "attributes": {
    //             "loop": true,
    //             "volume": 1
    //         },
    //         "sourceOffset": 0,
    //         "useEffect": true,
    //         "type": "fade",
    //         "fade": {
    //             "inTime": 0,
    //             "outTime": 0
    //         },
    //         "playbackRate": 1,
    //         "playbackRateType": "wasm",
    //         "useSourceResolution": true
    //     })
    //     audioNode['a-THmwCZf0cMOmWUGVOmZ9H'].timelineStart(0)
    //     audioNode['a-THmwCZf0cMOmWUGVOmZ9H'].timelineStop(30.898)

    //     audioNode['7-THmwCZf0cMOmWUGVOmZ9H'].setSourceOffset(0)
    //     audioNode['7-THmwCZf0cMOmWUGVOmZ9H'].timelineStart(0)
    //     audioNode['7-THmwCZf0cMOmWUGVOmZ9H'].timelineStop(3)

    //     audioTrack['8-THmwCZf0cMOmWUGVOmZ9H'].addNode({ node: audioNode['a-THmwCZf0cMOmWUGVOmZ9H'] })
    //     audioNode['a-THmwCZf0cMOmWUGVOmZ9H'].volume = 1
    // }
}

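// Set up the node-graph and timeline visualisations, hook up play/pause, and
// initialise the rotatable interactive box over the preview wrapper.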
function initEditor() {
    let graphCanvas = document.getElementById('graph');
    Editor.utils.visualiseVideoContextGraph(editor, graphCanvas, true);
    editor.on('timeupdate', () => {
        Editor.utils.visualiseVideoContextGraph(editor, graphCanvas);
    })
    InitVisualisations(editor, 'timeline-canvas', 'currentTime');
    playButton.onclick = () => {
        editor.play();
    }
    pauseButton.onclick = editor.pause.bind(editor);
    let interactiveBox = editor.initInteractiveBox(wrapper, {
        rotatable: true,
    });
}
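<!-- Demo markup: preview canvas inside the interactive-box wrapper, playback and track
     buttons, current-time readout, timeline canvas, and node-graph canvas. -->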
 <div class="wrapper" id="wrapper">
    <canvas width="720" height="1280" id="canvas"></canvas>
  </div>
  <button id="playButton">play</button>
  <button id="pauseButton">pause</button><br />
  <button id="removeNode">Remove node from track 1</button>
  <button id="addNode">Add node to track 2</button>
  <p id="currentTime">
  </p>
  <p>
    <canvas id="timeline-canvas" width="640" height="20"></canvas>
  </p>
  <canvas width="640" height="360" id="graph"></canvas>
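/* Demo page styles. Some selectors (e.g. #canvas2, #container, #bg-video, #changeSrc)
   have no matching markup on this page and are presumably shared with other demo variants. */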
html {
    font-family: monospace;
    color: #333;
    /* background-color: black; */
  }
  #current {
    font-size: 12px;
  }
  #canvas {
    width: 320px;
    height: 640px;
  }
  #canvas2 {
    width: 640px;
    height: 360px;
    position: absolute;
    left: 0px;
    top: 0px;
    z-index: 10;
  }
  #container {
    position: relative;
  }

  button {
    line-height: 1.499;
    position: relative;
    display: inline-block;
    font-weight: 400;
    white-space: nowrap;
    text-align: center;
    background-image: none;
    border: 1px solid transparent;
    cursor: pointer;
    user-select: none;
    height: 32px;
    padding: 0 15px;
    font-size: 14px;
    border-radius: 4px;
    color: rgba(0,0,0,0.65);
    background-color: #fff;
    border-color: #d9d9d9;
    outline: 0;
    transition: all .3s cubic-bezier(.645, .045, .355, 1);
  }

  button:hover {
    color: #40a9ff;
    background-color: #fff;
    border-color: #40a9ff;
  }

  button:active {
    color: #096dd9;
    background-color: #fff;
    border-color: #096dd9;
  }

  input[type="text"] {
    -webkit-appearance: none;
    background-color: #fff;
    background-image: none;
    border-radius: 4px;
    border: 1px solid #dcdfe6;
    box-sizing: border-box;
    color: #606266;
    display: inline-block;
    font-size: inherit;
    height: 32px;
    line-height: 32px;
    outline: none;
    padding: 0 8px;
    transition: border-color 0.2s cubic-bezier(0.645, 0.045, 0.355, 1);
    width: 50px;
  }

  .wrapper {
    width: 320px;
    height: 640px;
    position: relative;
  }

@font-face {
  font-family: "hardFont";
  src: url('./../../assets/hardFont.ttf');
}
  #bg-video {
    width: 640px;
    height: 400px;
  }
  #changeSrc {
    width: 80%;
    height: 40px;
    line-height: 40px;
  }

Custom external resources referenced by this project