WeChat Embedded Video 2 (Material from Tencent's Sun Shangxiang Doomsday Machine Skin Promotion H5)

Keywords: Front-end Mobile

This builds on the previous two analyses:

WeChat Embedded Video 1 (Case Analysis)
https://segmentfault.com/a/11...
From AnimateCC to CreateJS
https://segmentfault.com/a/11...

This time it is a demo in which CreateJS drives the UI on top of an inline video.

Since I have lost the original URL, I can only put together a simple demo of my own to reproduce the effect.

You need to set up a simple local server to run it; otherwise CreateJS cannot load the image resources.
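Any static file server will do. For example, a few lines of Node are enough as a throwaway server; this is only my own sketch, not part of the original demo:

// server.js - throwaway static server sketch so CreateJS can load images over http:// instead of file://
var http = require('http'), fs = require('fs'), path = require('path');

http.createServer(function (req, res) {
    // map the request path onto the demo folder (defaults to index.html)
    var file = path.join(__dirname, req.url === '/' ? 'index.html' : decodeURIComponent(req.url));
    fs.readFile(file, function (err, data) {
        if (err) { res.writeHead(404); res.end('Not found'); return; }
        res.end(data);   // no Content-Type header; MIME sniffing is fine for a quick local test
    });
}).listen(8080, function () {
    console.log('Demo running at http://localhost:8080');
});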

A brief analysis and summary of the implementation approach:

It is implemented mainly with two tags.

The video tag plays the video inline on the phone.

The canvas tag renders the UI layer and drives the video when the user interacts with the UI.
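A rough sketch of the resulting page structure is shown below. The IDs, sizes and inline styles are my own assumptions rather than the original markup, but the idea is that the video sits in one layer and the CreateJS canvas is stacked on top of it:

<!-- layering sketch: IDs, sizes and styles are assumptions, not the original markup -->
<div id="animation_container" style="position:relative;">
   <!-- inline video layer; webkit-playsinline / playsinline / x5-video-player-type
        are the attributes commonly used to keep playback inline inside WeChat -->
   <video id="video" src="media/demo.mp4" preload="auto"
          webkit-playsinline playsinline x5-video-player-type="h5"
          style="position:absolute; left:0; top:0; width:100%;"></video>
   <!-- CreateJS renders the UI on this canvas, stacked above the video -->
   <canvas id="canvas" width="750" height="1206"
           style="position:absolute; left:0; top:0;"></canvas>
   <div id="dom_overlay_container"
        style="position:absolute; left:0; top:0; pointer-events:none; overflow:hidden;"></div>
</div>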

First, publish the fla file to code.

The generated code is as follows:

<script>
var canvas, stage, exportRoot, anim_container, dom_overlay_container, fnStartAnimation;
function init() {
   canvas = document.getElementById("canvas");
   anim_container = document.getElementById("animation_container");
   dom_overlay_container = document.getElementById("dom_overlay_container");
   images = images||{};   // cache of loaded images, filled in by handleFileLoad
   ss = ss||{};           // spritesheet cache, filled in by handleComplete
   var loader = new createjs.LoadQueue(false);
   loader.addEventListener("fileload", handleFileLoad);
   loader.addEventListener("complete", handleComplete);
   loader.loadManifest(lib.properties.manifest);
}
function handleFileLoad(evt) { 
   if (evt.item.type == "image") { images[evt.item.id] = evt.result; }    
}
function handleComplete(evt) {
   //This function is always called, irrespective of the content. You can use the variable "stage" after it is created in token create_stage.
   var queue = evt.target;
   var ssMetadata = lib.ssMetadata;
   for(i=0; i<ssMetadata.length; i++) {
      ss[ssMetadata[i].name] = new createjs.SpriteSheet( {"images": [queue.getResult(ssMetadata[i].name)], "frames": ssMetadata[i].frames} )
   }
   exportRoot = new lib.demo3();
   stage = new createjs.Stage(canvas);
   stage.addChild(exportRoot);    
   //Registers the "tick" event listener.
   fnStartAnimation = function() {
      createjs.Ticker.setFPS(lib.properties.fps);
      createjs.Ticker.addEventListener("tick", stage);
   }      
   //Code to support hidpi screens and responsive scaling.
   function makeResponsive(isResp, respDim, isScale, scaleType) {    
      var lastW, lastH, lastS=1;    
      window.addEventListener('resize', resizeCanvas);      
      resizeCanvas();       
      function resizeCanvas() {        
         var w = lib.properties.width, h = lib.properties.height;         
         var iw = window.innerWidth, ih=window.innerHeight;       
         var pRatio = window.devicePixelRatio || 1, xRatio=iw/w, yRatio=ih/h, sRatio=1;       
         if(isResp) {                
            if((respDim=='width'&&lastW==iw) || (respDim=='height'&&lastH==ih)) {                    
               sRatio = lastS;                
            }           
            else if(!isScale) {                
               if(iw<w || ih<h)                  
                  sRatio = Math.min(xRatio, yRatio);          
            }           
            else if(scaleType==1) {                
               sRatio = Math.min(xRatio, yRatio);          
            }           
            else if(scaleType==2) {                
               sRatio = Math.max(xRatio, yRatio);          
            }        
         }        
         canvas.width = w*pRatio*sRatio;          
         canvas.height = h*pRatio*sRatio;
         canvas.style.width = dom_overlay_container.style.width = anim_container.style.width =  w*sRatio+'px';           
         canvas.style.height = anim_container.style.height = dom_overlay_container.style.height = h*sRatio+'px';
         stage.scaleX = pRatio*sRatio;        
         stage.scaleY = pRatio*sRatio;        
         lastW = iw; lastH = ih; lastS = sRatio;       
      }
   }
   makeResponsive(true,'height',true,1);  
   fnStartAnimation();
}
</script>

Unlike the previous article's publish output, this one contains an extra block of responsive-scaling code, which is a new feature of Animate CC 2017.

In the publish settings, tick the "Make responsive" and scaling options, and Animate CC will generate a page that adapts to the phone's width and height.
OK, to get to the point: the animation and the video need to interact, and some events need to be triggered from Animate frames.
For this we use a CreateJS class: EventDispatcher, which provides methods for managing event listeners and dispatching events.

var model = new createjs.EventDispatcher();

Create an instance, then

model.addEventListener("videoPlay1", function () {
    Loop(video, 0)
})

listen for the event that will be dispatched from the animation.
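For reference, video here is just the inline video element on the page, and each scene in the H5 gets its own event of this kind; the element id and the second event name below are my assumptions, not the original code:

// the inline <video> element that the canvas UI controls (the id is an assumption)
var video = document.getElementById("video");

// one listener per scene; the frame scripts inside the fla dispatch the matching event
model.addEventListener("videoPlay2", function () {
    Loop(video, 1)   // hypothetical second scene
})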
Then open the fla file in Animate CC.

Open page1

As you can see, I've added some code snippets to the timeline.

Right-click the corresponding frame and choose Actions.

/* Mouse Click Event
Clicking on the specified symbol instance executes a function in which you can add your own custom code.

Instructions:
1. Add your custom code on a new line after the line below that says "// Start your custom code".
The code will execute when you click on this symbol instance.
*/

this.enter_btn.addEventListener("click", fl_MouseClickHandler.bind(this));

function fl_MouseClickHandler()
{
    // Start your custom code
    console.log(111)                              // debug log to confirm the handler fires
    this.gotoAndPlay(20);                         // jump the canvas timeline to frame 20
    if(model)model.dispatchEvent("videoPlay1");   // tell the external JS to start the video loop
    // End your custom code
}

You will see this code in the Actions window that pops up.
One thing needs explaining: enter_btn is an instance name you assign yourself to the element you want to reference here.

Select the element on the stage and give it this instance name in the Properties panel.

The name must be set in the frame where the element first appears on the timeline, otherwise the reference may not work.

So with the code above, clicking the button jumps the timeline to frame 20 and dispatches the "videoPlay1" event.
The model instance we created earlier is listening for this event in the external JS, so that is where we can operate on the video.

function Loop(obj, index) {
    if (obj.timer) {
        clearInterval(obj.timer);
    }
    obj.play();
    obj.timer = setInterval(function () {
        var t = obj.currentTime;
        //Video Cycle Time Point
        if (Math.abs(t - videoTimeArr[3 * index + 1]) <= .1) {
            obj.currentTime = videoTimeArr[3 * index];
            // obj.play();
            // clearInterval(obj.timer);
        }
        //Animation Play Time Point
        if (Math.abs(t - videoTimeArr[3 * index + 2]) <= .1) {
            playAnimate(index);
        }
    }, 20);
}

Loop takes the video object and starts a timer that keeps checking the playback position: when it reaches the loop end point it jumps back to the loop start so that segment plays in a cycle, and when it reaches the animation trigger point it starts the corresponding canvas animation.
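For this to work, videoTimeArr needs three time points per scene, and playAnimate has to resume the matching animation on the canvas side. Neither is shown in the snippets above, so the layout and the helper below are only my sketch of what the demo does; the concrete values and frame numbers are made up:

// three entries per scene: [loop start, loop end, animation trigger], in seconds (values are made up)
var videoTimeArr = [
    0.0, 4.5, 4.3   // scene 0
    // ...one triple per additional scene
];

// sketch of playAnimate: resume the canvas timeline for the given scene
// (stage.children[0].view mirrors the access used in unloop below; the frame numbers are assumptions)
var animateFrameArr = [30 /* , ... */];
function playAnimate(index) {
    var view = stage.children[0].view;
    view.gotoAndPlay(animateFrameArr[index]);
}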
There is also a back button, which has its own short "backward" video clip. When the user taps back, that clip is played and the video then returns to the target time point.

function unloop(obj, index) {
    if (obj.timer) {
        clearInterval(obj.timer);
    }
    //Jump to the start of the backward clip and play it
    obj.currentTime = videoBackwardTimeArr[3 * index];
    obj.play();
    obj.timer = setInterval(function () {
        var t = obj.currentTime;
        //When the backward clip finishes, land on the target time point and resume looping
        if (Math.abs(t - videoBackwardTimeArr[3 * index + 1]) <= .1) {
            obj.currentTime = videoBackwardTimeArr[3 * index + 2];
            obj.play();
            Loop(obj, 0)
            //Reset the canvas animation to the matching frame
            var view = stage.children[0].view;
            view.gotoAndStop(29)
        }
    }, 20);
}
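unloop is wired up the same way as Loop, through an event dispatched from the back button's frame script. The event name and the layout of videoBackwardTimeArr below are my assumptions, based on how unloop indexes the array:

// assumed layout, per scene: [backward clip start, backward clip end, time point to land on afterwards]
var videoBackwardTimeArr = [
    9.0, 11.0, 0.0   // scene 0 (made-up values)
];

// listener for the back button's frame script (the event name is an assumption)
model.addEventListener("videoBack1", function () {
    unloop(video, 0)
})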

The demo code is linked below. Thanks for reading.

Links: http://pan.baidu.com/s/1mijrS7Y Password: pth1

Posted by I Am Chris on Sat, 06 Apr 2019 18:18:30 -0700