<!--
/*
* License for original modified code:
* Copyright 2017 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Modifications Copyright 2018 Alexander Sekula, Daniel Murphy, Joshua
* Chipman, and Joseph Romano.
*
*/
-->
<!DOCTYPE html>
<html lang="en">
<head>
<title>GraffitiGala</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no,
minimum-scale=1.0, maximum-scale=1.0">
<style>
body {
font-family: monospace;
margin: 0;
overflow: hidden;
position: fixed;
width: 100%;
height: 100vh;
-webkit-user-select: none;
user-select: none;
}
#info {
position: absolute;
left: 50%;
bottom: 0;
transform: translate(-50%, 0);
margin: 1em;
z-index: 10;
display: block;
line-height: 2em;
text-align: center;
width: 300px;
}
#info * {
color: #fff;
}
#canvas_div {
position: absolute;
top: 0;
left: 0;
transform: translate(-50%, 0);
z-index: 10;
display: block;
line-height: 2em;
text-align: center;
width: 0%;
}
#calibrateButton {
position: absolute;
z-index: 100;
}
.bottomButton {
position: absolute;
z-index: 100;
}
.title {
background-color: rgba(40, 40, 40, 0.4);
padding: 0.4em 0.6em;
border-radius: 0.1em;
}
.links {
background-color: rgba(40, 40, 40, 0.6);
padding: 0.4em 0.6em;
border-radius: 0.1em;
}
canvas {
position: absolute;
top: 0;
left: 0;
}
</style>
</head>
<body>
<button id="calibrateButton" type="button" onclick="calibrate()" style="width:100px;height:20px" hidden="true">Calibrate</button>
<button id="recalibrateButton" class="bottomButton" type="button" onclick="recalibrate()" style="width:100px;height:20px" hidden="true">Recalibrate</button>
<button id="cancelButton" class="bottomButton" type="button" onclick="cancelCalibrate()" style="width:100px;height:20px" hidden="true">Cancel</button>
<div id='canvas_div'>
<span id="myCanvas">
<canvas id="real_canvas" width="1000" height="1000"></canvas>
</span>
<!-- <div id="extra_box" style="width:100px;height:100px;border:3px solid #F00;">This is a rectangle!</div> -->
</div>
<script>
// color should be the string 'red' or 'green'.
function drawBox(color) {
var canvas = document.getElementById('real_canvas');
var context = canvas.getContext('2d');
// Clears the canvas.
context.clearRect(0, 0, canvas.width, canvas.height);
context.beginPath();
context.rect(document.documentElement.clientWidth / 2 - 50, document.documentElement.clientHeight / 2 - 50, 100, 100);
context.lineWidth = 3;
context.strokeStyle = color;
context.stroke();
}
drawBox('red');
// document.getElementById("canvas_div").style.top = (document.documentElement.clientWidth / 2 - 50) + "px";
// document.getElementById("canvas_div").style.left = (document.documentElement.clientHeight / 2 - 50) + "px";
document.getElementById("calibrateButton").style.top = (document.documentElement.clientHeight / 2 + 60) + "px";
document.getElementById("calibrateButton").style.left = (document.documentElement.clientWidth / 2 - 50) + "px";
document.getElementById("recalibrateButton").style.top = (document.documentElement.clientHeight - 30) + "px";
document.getElementById("recalibrateButton").style.left = (document.documentElement.clientWidth / 2 - 50) + "px";
document.getElementById("cancelButton").style.top = (document.documentElement.clientHeight - 30) + "px";
document.getElementById("cancelButton").style.left = (document.documentElement.clientWidth / 2 - 50) + "px";
var calibrationSuccessful = false; // This is the only time this is set to false.
var beta = 0;
var gamma = 0;
var orientationTolerance = 3;
var calibrationMessage = "";
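// DeviceOrientationEvent angles, in degrees: beta is the front-to-back tilt
// (rotation about the device's x-axis) and gamma is the left-to-right tilt
// (rotation about the y-axis). Both are roughly 0 when the phone lies flat,
// which is what the calibration check relies on.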
if (window.DeviceOrientationEvent) {
window.addEventListener("deviceorientation", deviceOrientationListener);
}
function deviceOrientationListener(event) {
beta = event.beta;
gamma = event.gamma;
}
</script>
<div id="info">
<span id="calibration_message">Keep your phone level <br/>and place box over qr code.<br/>Orientation values:<br/></span>
<span id="calibration_coords">coords here</span></br>
<span id="debug_output" hidden="true">Debug message here.</span><br/>
</div>
<script src="https://www.gstatic.com/firebasejs/4.9.0/firebase.js"></script>
<script>
// Initialize Firebase
var config = {
apiKey: "",
authDomain: "",
databaseURL: "",
projectId: "",
storageBucket: "",
messagingSenderId: ""
};
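// NOTE: the credentials above are left blank in this file. Fill in the values
// for your own Firebase project, otherwise the realtime database used below
// for sharing strokes cannot connect.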
firebase.initializeApp(config);
</script>
<script src="./third_party/three.js/three.js"></script>
<script src="./third_party/three.js/VRControls.js"></script>
<script src="./third_party/three.ar.js"></script>
<script id="fragmentShader" type="shader">
precision mediump float;
precision mediump int;
varying vec3 vPosition;
varying vec3 vNormal;
varying vec2 vUv;
varying vec3 vVelocity;
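// Fades out the edges of the stroke ribbon: uv.y is remapped from [0,1] to
// [-1,1], alpha ramps from 0 at the ribbon edge to 1 over a thin band near
// each edge (the smoothstep below), and the fragment color comes from the
// interpolated vertex normal.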
void main() {
vec2 uv = vUv;
uv *= 2.0;
uv -= 1.0;
float aa = 1.0 - abs( uv.y );
aa = smoothstep( 0.0, 0.05, aa );
gl_FragColor = vec4( vNormal, aa );
}
</script>
<script id="vertexShader" type="shader">
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
uniform mat3 normalMatrix;
attribute vec3 position;
attribute vec3 normal;
attribute vec2 uv;
attribute vec3 velocity;
varying vec3 vPosition;
varying vec3 vNormal;
varying vec2 vUv;
varying vec3 vVelocity;
void main()
{
vPosition = position;
vNormal = normal;
vUv = uv;
vVelocity = velocity;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
</script>
<script>
var vrDisplay;
var vrControls;
var arView;
var canvas;
var camera;
var scene;
var renderer;
// Drawing constants.
var MINIMIM_POINT_DISTANCE = 0.001;
var MINIMUM_STROKE_POINTS = 2;
var MINIMUM_BRUSH_POINTS = 2;
var STROKE_DISTANCE = 0.1;
var STROKE_WIDTH_EASING = 0.05;
var STROKE_WIDTH_MINIMUM = 0.00125;
var STROKE_WIDTH_MAXIMUM = 0.1;
var STROKE_WIDTH_MODIFIER = 1.25;
var STROKE_POSITION_EASING = 0.5;
var STROKE_VELOCITY_EASING = 0.5;
var STROKE_ORTHOGONAL_EASING = 0.5;
var STROKE_NORMAL_EASING = 0.95;
var STROKE_NORMAL_QUAT_EASING = 0.5;
var MINIMIM_ERASE_ENERGY = 0.005;
var MINIMIM_ERASE_ENERGY_FRAMES = 15;
// TOOLS
var SAMPLERATE = 6; // Only every SAMPLERATE-th frame is added to a stroke (a 1/SAMPLERATE sampling rate).
// Setup drawing variables.
var drawing = false;
var curStrokeRef = null;
var stroke = [];
var generatedStrokes = {};
var brush;
var brushStroke = [];
// initial position recorded when calibrated
var initialPosition;
var initialQuaternion;
// Database
var database;
var roomRef;
var drawMaterial = new THREE.RawShaderMaterial({
vertexShader: document.getElementById( 'vertexShader' ).textContent,
fragmentShader: document.getElementById( 'fragmentShader' ).textContent,
side: THREE.DoubleSide,
transparent: true
});
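// RawShaderMaterial does not prepend any built-in GLSL declarations, which is
// why the vertex shader above declares modelViewMatrix, projectionMatrix and
// the standard attributes itself; three.js still supplies their values at
// render time from the camera and from the BufferGeometry attributes created
// in generateStroke().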
/**
* Use the `getARDisplay()` utility to leverage the WebVR API
* to see if there are any AR-capable WebVR VRDisplays. Returns
* a valid display if found. Otherwise, display the unsupported
* browser message.
*/
THREE.ARUtils.getARDisplay().then(function (display) {
if (display) {
vrDisplay = display;
init();
} else {
THREE.ARUtils.displayUnsupportedMessage();
}
});
function init() {
// Turn on the debugging panel
// var arDebug = new THREE.ARDebug(vrDisplay);
// document.body.appendChild(arDebug.getElement());
// Setup firebase database service
database = firebase.database();
// Setup the three.js rendering environment
renderer = new THREE.WebGLRenderer({ alpha: true });
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.autoClear = false;
canvas = renderer.domElement;
document.body.appendChild(canvas);
scene = new THREE.Scene();
// Creating the ARView, which is the object that handles
// the rendering of the camera stream behind the three.js
// scene
arView = new THREE.ARView(vrDisplay, renderer);
// The ARPerspectiveCamera is very similar to THREE.PerspectiveCamera,
// except when using an AR-capable browser, the camera uses
// the projection matrix provided from the device, so that the
// perspective camera's depth planes and field of view matches
// the physical camera on the device.
camera = new THREE.ARPerspectiveCamera(vrDisplay, 60, window.innerWidth / window.innerHeight, 0.01, 100);
// VRControls is a utility from three.js that applies the device's
// orientation/position to the perspective camera, keeping our
// real world and virtual world in sync.
vrControls = new THREE.VRControls(camera);
// Bind our event handlers
window.addEventListener('resize', onWindowResize, false);
// Start drawing when the user touches the screen.
renderer.domElement.addEventListener('touchstart', function(event) {
if (calibrationSuccessful) {
drawing = true;
// No stroke ref yet (the stroke is not being saved in the database), so create one.
if (curStrokeRef == null) {
curStrokeRef = roomRef.push();
}
}
});
// Stop the current draw stroke when the user finishes the touch.
renderer.domElement.addEventListener('touchend', function(event) {
if (calibrationSuccessful) {
drawing = false;
// Drop the stroke ref; the finished stroke stays in the database but is no longer updated.
curStrokeRef = null;
stroke.length = 0;
}
});
// unhide calibrate button
document.getElementById("calibrateButton").hidden = false;
update();
}
// Maps stroke keys to the time we last regenerated that stroke (see downloadAndListen).
var timeChecker = {};
function downloadAndListen() {
// TODO: Remove old listeners and old scene data on recalibration;
// right now, calling this function multiple times corrupts the scene state.
// database.ref("rooms/1").set([]);
roomRef = database.ref("rooms/1");
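// Firebase semantics: "child_added" fires once for every stroke already in
// the room when we attach, and again for each new stroke any client pushes;
// "child_changed" fires every time an in-progress stroke is re-set with more
// points. The timeChecker map throttles those re-renders to roughly one per
// 100 ms per stroke.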
roomRef.on("child_added", function(data) {
// debugger;
if (curStrokeRef && (data.key == curStrokeRef.key)) {
return;
}
console.log("Child added with val: " + data.val());
updateStroke(data.key, data.val());
timeChecker[data.key] = Date.now();
});
roomRef.on("child_changed", function(data) {
// debugger;
if (Date.now() - timeChecker[data.key] > 100) {
if (curStrokeRef && (data.key == curStrokeRef.key)) {
return;
}
console.log("Child changed with val: " + data.val());
updateStroke(data.key, data.val());
timeChecker[data.key] = Date.now();
}
});
}
// TODO: Join the hidden=true/falses into one nicely nested div.
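// Calibration: when the phone is held level over the shared QR code marker,
// capture the camera's current position and orientation. Each client stores
// this pose as its anchor, so strokes written to the database in
// anchor-relative coordinates line up across devices.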
function calibrate() {
// If camera position is exactly 0, 0, 0, then the camera hasn't found its location yet,
// and we want to prevent the user from calibrating. This is hacky but we don't know
// how else to detect whether the camera found its position.
if (!(camera.position.x == 0 && camera.position.y == 0 && camera.position.z == 0)) {
if (beta <= orientationTolerance && beta >= -orientationTolerance &&
gamma <= orientationTolerance && gamma >= -orientationTolerance) {
initialPosition = camera.position.clone();
initialQuaternion = camera.quaternion.clone();
document.getElementById("calibrateButton").hidden = true;
document.getElementById("myCanvas").hidden = true;
document.getElementById("recalibrateButton").hidden = true;
document.getElementById("cancelButton").hidden = true;
document.getElementById("calibration_message").hidden = true;
document.getElementById("calibration_coords").hidden = true;
calibrationSuccessful = true;
downloadAndListen();
}
}
}
function recalibrate() {
document.getElementById("calibrateButton").hidden = false;
document.getElementById("myCanvas").hidden = false;
document.getElementById("recalibrateButton").hidden = true;
document.getElementById("cancelButton").hidden = false;
document.getElementById("calibration_message").hidden = false;
document.getElementById("calibration_coords").hidden = false;
}
function cancelCalibrate() {
document.getElementById("calibrateButton").hidden = true;
document.getElementById("myCanvas").hidden = true;
document.getElementById("recalibrateButton").hidden = true;
document.getElementById("cancelButton").hidden = true;
document.getElementById("calibration_message").hidden = true;
document.getElementById("calibration_coords").hidden = true;
}
// Get a point in front of the device to use as the drawing location.
// Pass in a THREE.Vector3 to be populated with data to avoid creating garbage.
// This strange function structure is a way of avoiding creating garbage while
// generating the rotated forward vector.
var getDrawPoint = (function() {
// Setup a basic forward vector (scaled down so it's not so far away).
var forward = new THREE.Vector3(0, 0, -1).multiplyScalar(STROKE_DISTANCE);
// Create a scratch vector for generating the rotated forward vector.
var rotatedForward = new THREE.Vector3();
return function(out) {
// Start with the camera position, which is equivalent to device pose.
out.copy(camera.position);
// Rotate the forward vector by the pose orientation.
rotatedForward.copy(forward);
rotatedForward.applyQuaternion(camera.quaternion);
out.add(rotatedForward);
}
})();
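// Usage: getDrawPoint(someVector3) overwrites the passed vector with a point
// STROKE_DISTANCE world units (meters on AR-capable devices) in front of the
// camera along its current view direction.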
// Basic physics parameters to help smooth out input data for nicer
// brush strokes
var strokeWidth = STROKE_WIDTH_MINIMUM;
var position = new THREE.Vector3();
var previousPosition = new THREE.Vector3();
var normalQuat = new THREE.Quaternion();
var previousNormalQuat = new THREE.Quaternion();
var normal = new THREE.Vector3(0, 0, 1);
var previousNormal = new THREE.Vector3(0, 0, 1);
var velocity = new THREE.Vector3();
var previousVelocity = new THREE.Vector3();
var acceleration = new THREE.Vector3();
// An array to keep track of the past accelerations
var accelerationArray = [];
/**
* This function returns an object that represents a point in the stroke.
* The position, normal, velocity and width values are used to help calculate
* how the stroke will look. They are set by the physics values being
* calculated every frame inside the updatePhysics function
*/
function getStroke()
{
return {
position: new THREE.Vector3(position.x, position.y, position.z),
normal: new THREE.Vector3(normal.x, normal.y, normal.z),
velocity: new THREE.Vector3(velocity.x, velocity.y, velocity.z),
width: strokeWidth
};
}
function updateStroke(id, new_stroke) {
// if (id in generatedStrokes) {
// // scene.remove(generatedStrokes[id]);
// }
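// Strokes in the database are stored relative to the calibration anchor.
// Rotate each point by this device's initialQuaternion and translate it by
// initialPosition to bring it into this device's local tracking frame
// (the inverse of the transform applied in processDraw before saving).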
for (var i = 0; i < new_stroke.length; i++) {
var vec3Pos = new THREE.Vector3(new_stroke[i].position.x,
new_stroke[i].position.y,
new_stroke[i].position.z);
vec3Pos.applyQuaternion(initialQuaternion);
new_stroke[i].position.x = vec3Pos.x + initialPosition.x;
new_stroke[i].position.y = vec3Pos.y + initialPosition.y;
new_stroke[i].position.z = vec3Pos.z + initialPosition.z;
}
generatedStrokes[id] = generateStroke(new_stroke);
scene.add(generatedStrokes[id]);
}
/**
* This function is called when the user touches down and starts to draw
* a stroke
*/
function processDraw() {
// Prevents the user from drawing if they have not yet calibrated.
if (!calibrationSuccessful) {
return;
}
// Add the draw point to the current stroke.
stroke.push(getStroke());
// Check to see if you have enough points (2) to start drawing a stroke
if(stroke.length < MINIMUM_STROKE_POINTS) {
return;
}
// Get the last two points in the stroke
var s0 = stroke[stroke.length - 2];
var s1 = stroke[stroke.length - 1];
// Check if the distance between the last two points is bigger
// than a set amount; this avoids tiny strips in the brush stroke
if(s0.position.distanceTo(s1.position) < MINIMIM_POINT_DISTANCE) {
stroke.pop();
return;
}
var strokeId = curStrokeRef.key;
// Remove the old stroke from the scene so we can regenerate the stroke.
if (generatedStrokes[strokeId]) {
scene.remove(generatedStrokes[strokeId]);
}
// Generate a new stroke and cache it in our array of strokes
var generated = generateStroke(stroke);
generatedStrokes[strokeId] = generated;
// Save into the DB
// fixed_stroke = stroke;
// console.log(JSON.stringify(stroke));
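// Convert the stroke from this device's local tracking frame into
// anchor-relative coordinates before saving: subtract the calibration
// position, then rotate by the conjugate (inverse) of the calibration
// orientation. updateStroke() applies the opposite transform when reading.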
var fixedStroke = [];
// Clone before conjugating so the stored calibration quaternion is not mutated in place.
var conjugate = initialQuaternion.clone().conjugate();
for (let s of stroke) {
let sVector3 = new THREE.Vector3(s.position.x - initialPosition.x,
s.position.y - initialPosition.y,
s.position.z - initialPosition.z);
sVector3.applyQuaternion(conjugate);
let newS = {position: sVector3, normal: s.normal, velocity: s.velocity, width: s.width};
fixedStroke.push(newS);
}
// fixed_stroke.x -= initialPosition.x;
// fixed_stroke.y -= initialPosition.y;
// fixed_stroke.z -= initialPosition.z;
curStrokeRef.set(fixedStroke);
// Add the stroke to the threejs scene for rendering
scene.add(generatedStrokes[strokeId]);
}
/**
* This function is responsible for constantly rendering a brush reticle
* when the user is not touching down on the screen. This helps to pre-
* visualize the dynamics of the brush (color, stroke width, etc)
*/
function processDrawBrush() {
// Add the draw point to the current stroke.
brushStroke.push(getStroke());
// Wait until there are enough points to form a brush segment.
if(brushStroke.length < MINIMUM_BRUSH_POINTS) {
return;
}
// Remove the old brush reticle from the scene so we can regenerate it
if (brush) {
brushStroke.shift();
scene.remove(brush);
}
// Get the last two points in the stroke
var b0 = brushStroke[brushStroke.length - 2];
var b1 = brushStroke[brushStroke.length - 1];
// Check if the distance between the last two points is bigger
// than a set amount; this avoids tiny strips in the brush stroke
if(b0.position.distanceTo(b1.position) < MINIMIM_POINT_DISTANCE) {
return;
}
// Generate a new stroke and sets it to brush reticle
brush = generateStroke(brushStroke);
// Add the brush reticle to the threejs scene for rendering
scene.add(brush);
}
/**
* This function calculates the positions, normals, uvs and velocities
* float32Arrays needed to create a buffer geometry so we can render our brush
* strokes using threejs (WebGL)
*/
function generateStroke(strokeData) {
// Create Float32Arrays of the proper size to hold vertex information
// Each stroke is rendered as a triangle strip, so each stroke point yields
// two vertices, and each vertex stores either three floats (x, y, z) or two
// floats (u, v) depending on the attribute.
var positions = new Float32Array(strokeData.length * 2 * 3);
var normals = new Float32Array(strokeData.length * 2 * 3);
var uvs = new Float32Array(strokeData.length * 2 * 2);
var velocities = new Float32Array(strokeData.length * 2 * 3);
// Create a Vector3 to cache the vertex offset vector
var v = new THREE.Vector3();
// Create a reference to the last stroke point's velocity
var lastVelocity;
// Create two Vector3s to cache the positions of the two vertex positions
// calculated from the origin stroke position and its physics properties
var p1 = new THREE.Vector3();
var p2 = new THREE.Vector3();
// Create a variable to keep track of whether the current stroke point's
// velocity is in the opposite direction of the last point's velocity
var flip = false;
// Loop through the stroke points and calculate each point's two emitted vertex
// offset positions and set their other properties in the Float32Arrays
// created above
for (var i = 0; i < strokeData.length; i++) {
// Get the current stroke point
var entry = strokeData[i];
// Calculate the stroke point's offset by using the cross product of the
// stroke point's normal and its velocity
v.crossVectors(entry.normal, entry.velocity);
// Normalize the vector and scale it by the precalculated stroke width
v.normalize();
// This width is based on how fast the user was moving the device when the
// stroke was drawn
v.multiplyScalar(entry.width);
// If this isn't the first point check for a velocity flip, otherwise create
// a new Vector3
if( lastVelocity ) {
// Use the dot product to check whether the current velocity points in the
// same direction as the previous point's velocity
var directionChange = lastVelocity.dot( entry.velocity );
if( directionChange < 0 ) {
flip = !flip;
}
}
else {
lastVelocity = new THREE.Vector3();
}
lastVelocity.copy( entry.velocity );
// this is used to avoid improper drawing of triangles when the user
// changes direction of a stroke
if( flip ) {
p1.addVectors(entry.position, v);
p2.subVectors(entry.position, v);
}
else {
p1.subVectors(entry.position, v);
p2.addVectors(entry.position, v);
}
// Calculate the offset for the current vertex (i * 2 * 3)
// Set the positions for the ith and ith + 1 vertices
var index = i * 2 * 3;
positions[index] = p1.x;
positions[index + 1] = p1.y;
positions[index + 2] = p1.z;
positions[index + 3] = p2.x;
positions[index + 4] = p2.y;
positions[index + 5] = p2.z;
// Calculate the offset for the current vertex (i * 2 * 3)
index = i * 2 * 3;
// Set the normals for the ith and ith + 1 vertices
normals[index] = entry.normal.x;
normals[index + 1] = entry.normal.y;
normals[index + 2] = entry.normal.z;
normals[index + 3] = entry.normal.x;
normals[index + 4] = entry.normal.y;
normals[index + 5] = entry.normal.z;
// Calculate the offset for the current vertex (i * 2 * 3)
index = i * 2 * 3;
// Set the velocities for the ith and ith + 1 vertices
velocities[index] = entry.velocity.x;
velocities[index + 1] = entry.velocity.y;
velocities[index + 2] = entry.velocity.z;
velocities[index + 3] = entry.velocity.x;
velocities[index + 4] = entry.velocity.y;
velocities[index + 5] = entry.velocity.z;
// Calculate the offset for the current vertex (i * 2 * 2)
index = i * 2 * 2;
// Set the uvs for the ith and ith + 1 vertices
// Each complete stroke has UV coordinates running from (0,0) to (1,1)
uvs[index] = i / (strokeData.length - 1);
uvs[index + 1] = 0;
uvs[index + 2] = i / (strokeData.length - 1);
uvs[index + 3] = 1;
}
// Create a Threejs BufferGeometry
var geometry = new THREE.BufferGeometry();
function disposeArray() { this.array = null; }
// Add attributes to the buffer geometry using the Float32Arrays created
// above. This will tell our shader pipeline information about each vertex
// so we can create all types of dynamic strokes & paints
geometry.addAttribute('position', new THREE.BufferAttribute(positions, 3).onUpload(disposeArray));
geometry.addAttribute('normal', new THREE.BufferAttribute(normals, 3).onUpload(disposeArray));
geometry.addAttribute('uv', new THREE.BufferAttribute(uvs, 2).onUpload(disposeArray));
geometry.addAttribute('velocity', new THREE.BufferAttribute(velocities, 3).onUpload(disposeArray));
geometry.computeBoundingSphere();
// Create a Threejs mesh and set its draw mode to triangle strips
var mesh = new THREE.Mesh(geometry, drawMaterial);
mesh.drawMode = THREE.TriangleStripDrawMode;
return mesh;
}
/**
* Small utility function to sum the last n frames of acceleration to see if
* the device was shaken. If the device was shaken, then the last stroke is
* erased.
*/
function checkForShake() {
// Calculate the current acceleration's magnitude and add it to the
// acceleration array
accelerationArray.push(acceleration.length());
var len = accelerationArray.length;
// if the accelerationArray has enough frames to calculate whether the user
// has shaken the device, then check for a shake
if(len > MINIMIM_ERASE_ENERGY_FRAMES) {
// Sum the "energy" total by looping through the accelerationArray values
var energy = 0;
for(var i = 0; i < len; i++) {
energy += accelerationArray[i];
}
// Check to see if the total energy is greater than a preset amount;
// this threshold was tuned by user-testing different shake strengths
if(energy > MINIMIM_ERASE_ENERGY * MINIMIM_ERASE_ENERGY_FRAMES) {
// If a shake was detected, clear the accelerationArray so we don't get
// multiple shakes in a small time frame
accelerationArray.length = 0;
// *********************************************************************
// This is the action that happens when the user shakes the device
// *********************************************************************
// console.log("SHAKE DETECTED");
// **********************************************************************
}
else {
// If the energy wasn't high enough pop off the oldest acceleration value
accelerationArray.shift();
}
}
}
/**
* Clears all the strokes and essentially lets the user start over
*/
function clearStrokes()
{
// If the user is currently drawing, stop
drawing = false;
// Clear the current stroke array
stroke.length = 0;
// Remove all the three.js stroke meshes from the scene.
// (The old strokes[] array has been replaced by the generatedStrokes map,
// so iterate over that instead.)
for (var key in generatedStrokes) {
scene.remove(generatedStrokes[key]);
}
generatedStrokes = {};
// NOTE: nothing calls this function yet; hook it up (for example to the
// shake handler in checkForShake) if you want a "clear all" action.
}
/**
* update physics function, called once per frame. Handles updating and
* smoothing out rough input data from AR Session
*/
function updatePhysics()
{
// Cache previous positions & normals
previousPosition.copy(position);
previousNormal.copy(normal);
previousNormalQuat.copy(normalQuat);
previousVelocity.copy(velocity);
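// Each quantity below is exponentially smoothed with
// new = lerp(previous, raw, EASING): an easing constant near 1 tracks the raw
// pose closely, while a smaller constant gives a smoother but laggier stroke.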
// Update Drawing Stroke Position
getDrawPoint(position);
position.lerpVectors(previousPosition, position, STROKE_POSITION_EASING);
normalQuat.slerp(camera.quaternion, STROKE_NORMAL_QUAT_EASING);
// Update Drawing Stroke Normal
normal.set(0, 0, 1);
normal.applyQuaternion(normalQuat);
normal.normalize();
normal.lerpVectors(previousNormal, normal, STROKE_NORMAL_EASING);
// Update velocity
velocity.subVectors(position, previousPosition);
velocity.lerpVectors(previousVelocity, velocity, STROKE_VELOCITY_EASING);
// Update acceleration
acceleration.subVectors(velocity, previousVelocity);
// Update Drawing Stroke Width
strokeWidth = THREE.Math.lerp(strokeWidth, velocity.length() * STROKE_WIDTH_MODIFIER, STROKE_WIDTH_EASING);
strokeWidth = Math.min( Math.max( strokeWidth, STROKE_WIDTH_MINIMUM ), STROKE_WIDTH_MAXIMUM );
}
var frame = 0;
/**
* The render loop, called once per frame. Handles updating
* our scene and rendering.
*/
function update() {
// if (initialPosition) {
// var position = (camera.position.x - initialPosition.x) + ", " +
// (camera.position.y - initialPosition.y) + ", " +
// (camera.position.z - initialPosition.z) + ", BETA:" + beta + ", GAMMA:" + gamma;
// }
//document.getElementById("debug_output").innerHTML = Object.keys(camera.position).join();
// document.getElementById("debug_output").innerHTML = position;
// document.getElementById("debug_output").innerHTML = Object.values(camera.quaternion).join("<br/>");
if (!(camera.position.x == 0 && camera.position.y == 0 && camera.position.z == 0)) {
document.getElementById("calibration_coords").innerHTML = "(" + Math.round(beta) + ", " + Math.round(gamma) + ")";
if (beta <= orientationTolerance && beta >= -orientationTolerance &&
gamma <= orientationTolerance && gamma >= -orientationTolerance) {
document.getElementById("calibration_coords").style.color = "green";
drawBox('green');
} else {
document.getElementById("calibration_coords").style.color = "red";
drawBox('red');
}
} else {
document.getElementById("calibration_coords").innerHTML = "Looking for position...";
document.getElementById("calibration_coords").style.color = "red";
drawBox('red');
}
// Clears color from the frame before rendering the camera (arView) or scene.
renderer.clearColor();
// Render the device's camera stream on screen first of all.
// This keeps the camera pose synchronized with the correct video frame.
arView.render();
// Update our camera projection matrix in the event that
// the near or far planes have updated
camera.updateProjectionMatrix();
// Update our perspective camera's positioning
vrControls.update();
// Update Brush Physics
updatePhysics();
// Check for shake to undo functionality
// checkForShake();
// Update the current graffiti stroke.
if (drawing) {
if (frame % SAMPLERATE == 0) {
processDraw();
frame = 0;
}
frame++;
}
// Uncomment to draw brush reticle
// processDrawBrush();
// Render our three.js virtual scene
renderer.clearDepth();
renderer.render(scene, camera);
// Kick off the requestAnimationFrame to call this function
// when a new VRDisplay frame is rendered
vrDisplay.requestAnimationFrame(update);
}
/**
* On window resize, update the perspective camera's aspect ratio,
* and call `updateProjectionMatrix` so that we can get the latest
* projection matrix provided from the device
*/
function onWindowResize () {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth, window.innerHeight);
}
</script>
</body>
</html>