author    tylermurphy534 <tylermurphy534@gmail.com>  2022-11-06 15:12:42 -0500
committer tylermurphy534 <tylermurphy534@gmail.com>  2022-11-06 15:12:42 -0500
commit    eb84bb298d2b95aec7b2ae12cbf25ac64f25379a (patch)
tree      efd616a157df06ab661c6d56651853431ac6b08b /VRCSDK3Worlds/Assets/Editor/x64/Bakery/denoiseFinishSH72.ptx
download  unityprojects-eb84bb298d2b95aec7b2ae12cbf25ac64f25379a.tar.gz
          unityprojects-eb84bb298d2b95aec7b2ae12cbf25ac64f25379a.tar.bz2
          unityprojects-eb84bb298d2b95aec7b2ae12cbf25ac64f25379a.zip
move to self host
Diffstat (limited to 'VRCSDK3Worlds/Assets/Editor/x64/Bakery/denoiseFinishSH72.ptx')
-rw-r--r--  VRCSDK3Worlds/Assets/Editor/x64/Bakery/denoiseFinishSH72.ptx  526
1 file changed, 526 insertions(+), 0 deletions(-)
diff --git a/VRCSDK3Worlds/Assets/Editor/x64/Bakery/denoiseFinishSH72.ptx b/VRCSDK3Worlds/Assets/Editor/x64/Bakery/denoiseFinishSH72.ptx
new file mode 100644
index 00000000..cd5ee610
--- /dev/null
+++ b/VRCSDK3Worlds/Assets/Editor/x64/Bakery/denoiseFinishSH72.ptx
@@ -0,0 +1,526 @@
+//
+// Generated by NVIDIA NVVM Compiler
+//
+// Compiler Build ID: CL-23083092
+// Cuda compilation tools, release 9.1, V9.1.85
+// Based on LLVM 3.4svn
+//
+
+.version 6.1
+.target sm_30
+.address_size 64
+
+ // .globl __raygen__oxMain
+.const .align 8 .b8 cs[32];
+
+.visible .entry __raygen__oxMain(
+
+)
+{
+ .reg .pred %p<53>;
+ .reg .b16 %rs<9>;
+ .reg .f32 %f<326>;
+ .reg .b32 %r<39>;
+ .reg .b64 %rd<14>;
+
+
+ // inline asm
+ call (%r1), _optix_get_launch_index_x, ();
+ // inline asm
+ // inline asm
+ call (%r2), _optix_get_launch_index_y, ();
+ // inline asm
+ ld.const.u64 %rd2, [cs+8];
+ cvta.to.global.u64 %rd3, %rd2;
+ ld.const.v2.u32 {%r4, %r5}, [cs+24];
+ mad.lo.s32 %r7, %r4, %r2, %r1;
+ cvt.u64.u32 %rd1, %r7;
+ mul.wide.u32 %rd4, %r7, 16;
+ add.s64 %rd5, %rd3, %rd4;
+ ld.global.v4.f32 {%f43, %f44, %f45, %f46}, [%rd5];
+ setp.gt.s32 %p4, %r5, 0;
+ @%p4 bra BB0_2;
+ bra.uni BB0_1;
+
+BB0_2:
+ mov.f32 %f53, 0f3F8CCCCD;
+ cvt.rzi.f32.f32 %f54, %f53;
+ fma.rn.f32 %f55, %f54, 0fC0000000, 0f400CCCCD;
+ abs.f32 %f4, %f55;
+ abs.f32 %f5, %f43;
+ setp.lt.f32 %p5, %f5, 0f00800000;
+ mul.f32 %f56, %f5, 0f4B800000;
+ selp.f32 %f57, 0fC3170000, 0fC2FE0000, %p5;
+ selp.f32 %f58, %f56, %f5, %p5;
+ mov.b32 %r9, %f58;
+ and.b32 %r10, %r9, 8388607;
+ or.b32 %r11, %r10, 1065353216;
+ mov.b32 %f59, %r11;
+ shr.u32 %r12, %r9, 23;
+ cvt.rn.f32.u32 %f60, %r12;
+ add.f32 %f61, %f57, %f60;
+ setp.gt.f32 %p6, %f59, 0f3FB504F3;
+ mul.f32 %f62, %f59, 0f3F000000;
+ add.f32 %f63, %f61, 0f3F800000;
+ selp.f32 %f64, %f62, %f59, %p6;
+ selp.f32 %f65, %f63, %f61, %p6;
+ add.f32 %f66, %f64, 0fBF800000;
+ add.f32 %f52, %f64, 0f3F800000;
+ // inline asm
+ rcp.approx.ftz.f32 %f51,%f52;
+ // inline asm
+ add.f32 %f67, %f66, %f66;
+ mul.f32 %f68, %f51, %f67;
+ mul.f32 %f69, %f68, %f68;
+ mov.f32 %f70, 0f3C4CAF63;
+ mov.f32 %f71, 0f3B18F0FE;
+ fma.rn.f32 %f72, %f71, %f69, %f70;
+ mov.f32 %f73, 0f3DAAAABD;
+ fma.rn.f32 %f74, %f72, %f69, %f73;
+ mul.rn.f32 %f75, %f74, %f69;
+ mul.rn.f32 %f76, %f75, %f68;
+ sub.f32 %f77, %f66, %f68;
+ neg.f32 %f78, %f68;
+ add.f32 %f79, %f77, %f77;
+ fma.rn.f32 %f80, %f78, %f66, %f79;
+ mul.rn.f32 %f81, %f51, %f80;
+ add.f32 %f82, %f76, %f68;
+ sub.f32 %f83, %f68, %f82;
+ add.f32 %f84, %f76, %f83;
+ add.f32 %f85, %f81, %f84;
+ add.f32 %f86, %f82, %f85;
+ sub.f32 %f87, %f82, %f86;
+ add.f32 %f88, %f85, %f87;
+ mov.f32 %f89, 0f3F317200;
+ mul.rn.f32 %f90, %f65, %f89;
+ mov.f32 %f91, 0f35BFBE8E;
+ mul.rn.f32 %f92, %f65, %f91;
+ add.f32 %f93, %f90, %f86;
+ sub.f32 %f94, %f90, %f93;
+ add.f32 %f95, %f86, %f94;
+ add.f32 %f96, %f88, %f95;
+ add.f32 %f97, %f92, %f96;
+ add.f32 %f98, %f93, %f97;
+ sub.f32 %f99, %f93, %f98;
+ add.f32 %f100, %f97, %f99;
+ mov.f32 %f101, 0f400CCCCD;
+ mul.rn.f32 %f102, %f101, %f98;
+ neg.f32 %f103, %f102;
+ fma.rn.f32 %f104, %f101, %f98, %f103;
+ fma.rn.f32 %f105, %f101, %f100, %f104;
+ mov.f32 %f106, 0f00000000;
+ fma.rn.f32 %f107, %f106, %f98, %f105;
+ add.rn.f32 %f108, %f102, %f107;
+ neg.f32 %f109, %f108;
+ add.rn.f32 %f110, %f102, %f109;
+ add.rn.f32 %f111, %f110, %f107;
+ mov.b32 %r13, %f108;
+ setp.eq.s32 %p7, %r13, 1118925336;
+ add.s32 %r14, %r13, -1;
+ mov.b32 %f112, %r14;
+ add.f32 %f113, %f111, 0f37000000;
+ selp.f32 %f114, %f112, %f108, %p7;
+ selp.f32 %f6, %f113, %f111, %p7;
+ mul.f32 %f115, %f114, 0f3FB8AA3B;
+ cvt.rzi.f32.f32 %f116, %f115;
+ mov.f32 %f117, 0fBF317200;
+ fma.rn.f32 %f118, %f116, %f117, %f114;
+ mov.f32 %f119, 0fB5BFBE8E;
+ fma.rn.f32 %f120, %f116, %f119, %f118;
+ mul.f32 %f121, %f120, 0f3FB8AA3B;
+ ex2.approx.ftz.f32 %f122, %f121;
+ add.f32 %f123, %f116, 0f00000000;
+ ex2.approx.f32 %f124, %f123;
+ mul.f32 %f125, %f122, %f124;
+ setp.lt.f32 %p8, %f114, 0fC2D20000;
+ selp.f32 %f126, 0f00000000, %f125, %p8;
+ setp.gt.f32 %p9, %f114, 0f42D20000;
+ selp.f32 %f317, 0f7F800000, %f126, %p9;
+ setp.eq.f32 %p10, %f317, 0f7F800000;
+ @%p10 bra BB0_4;
+
+ fma.rn.f32 %f317, %f317, %f6, %f317;
+
+BB0_4:
+ setp.lt.f32 %p11, %f43, 0f00000000;
+ setp.eq.f32 %p12, %f4, 0f3F800000;
+ and.pred %p1, %p11, %p12;
+ mov.b32 %r15, %f317;
+ xor.b32 %r16, %r15, -2147483648;
+ mov.b32 %f127, %r16;
+ selp.f32 %f319, %f127, %f317, %p1;
+ setp.eq.f32 %p13, %f43, 0f00000000;
+ @%p13 bra BB0_7;
+ bra.uni BB0_5;
+
+BB0_7:
+ add.f32 %f130, %f43, %f43;
+ selp.f32 %f319, %f130, 0f00000000, %p12;
+ bra.uni BB0_8;
+
+BB0_1:
+ ld.const.u64 %rd6, [cs];
+ cvta.to.global.u64 %rd7, %rd6;
+ shl.b64 %rd8, %rd1, 3;
+ add.s64 %rd9, %rd7, %rd8;
+ mov.f32 %f50, 0f3F800000;
+ // inline asm
+ { cvt.rn.f16.f32 %rs4, %f50;}
+
+ // inline asm
+ // inline asm
+ { cvt.rn.f16.f32 %rs3, %f45;}
+
+ // inline asm
+ // inline asm
+ { cvt.rn.f16.f32 %rs2, %f44;}
+
+ // inline asm
+ // inline asm
+ { cvt.rn.f16.f32 %rs1, %f43;}
+
+ // inline asm
+ st.global.v4.u16 [%rd9], {%rs1, %rs2, %rs3, %rs4};
+ bra.uni BB0_36;
+
+BB0_5:
+ setp.geu.f32 %p14, %f43, 0f00000000;
+ @%p14 bra BB0_8;
+
+ cvt.rzi.f32.f32 %f129, %f101;
+ setp.neu.f32 %p15, %f129, 0f400CCCCD;
+ selp.f32 %f319, 0f7FFFFFFF, %f319, %p15;
+
+BB0_8:
+ abs.f32 %f298, %f43;
+ add.f32 %f131, %f298, 0f400CCCCD;
+ mov.b32 %r17, %f131;
+ setp.lt.s32 %p17, %r17, 2139095040;
+ @%p17 bra BB0_13;
+
+ abs.f32 %f315, %f43;
+ setp.gtu.f32 %p18, %f315, 0f7F800000;
+ @%p18 bra BB0_12;
+ bra.uni BB0_10;
+
+BB0_12:
+ add.f32 %f319, %f43, 0f400CCCCD;
+ bra.uni BB0_13;
+
+BB0_10:
+ abs.f32 %f316, %f43;
+ setp.neu.f32 %p19, %f316, 0f7F800000;
+ @%p19 bra BB0_13;
+
+ selp.f32 %f319, 0fFF800000, 0f7F800000, %p1;
+
+BB0_13:
+ mov.f32 %f306, 0fB5BFBE8E;
+ mov.f32 %f305, 0fBF317200;
+ mov.f32 %f304, 0f00000000;
+ mov.f32 %f303, 0f35BFBE8E;
+ mov.f32 %f302, 0f3F317200;
+ mov.f32 %f301, 0f3DAAAABD;
+ mov.f32 %f300, 0f3C4CAF63;
+ mov.f32 %f299, 0f3B18F0FE;
+ setp.eq.f32 %p20, %f43, 0f3F800000;
+ selp.f32 %f134, 0f3F800000, %f319, %p20;
+ cvt.sat.f32.f32 %f17, %f134;
+ abs.f32 %f18, %f44;
+ setp.lt.f32 %p21, %f18, 0f00800000;
+ mul.f32 %f135, %f18, 0f4B800000;
+ selp.f32 %f136, 0fC3170000, 0fC2FE0000, %p21;
+ selp.f32 %f137, %f135, %f18, %p21;
+ mov.b32 %r18, %f137;
+ and.b32 %r19, %r18, 8388607;
+ or.b32 %r20, %r19, 1065353216;
+ mov.b32 %f138, %r20;
+ shr.u32 %r21, %r18, 23;
+ cvt.rn.f32.u32 %f139, %r21;
+ add.f32 %f140, %f136, %f139;
+ setp.gt.f32 %p22, %f138, 0f3FB504F3;
+ mul.f32 %f141, %f138, 0f3F000000;
+ add.f32 %f142, %f140, 0f3F800000;
+ selp.f32 %f143, %f141, %f138, %p22;
+ selp.f32 %f144, %f142, %f140, %p22;
+ add.f32 %f145, %f143, 0fBF800000;
+ add.f32 %f133, %f143, 0f3F800000;
+ // inline asm
+ rcp.approx.ftz.f32 %f132,%f133;
+ // inline asm
+ add.f32 %f146, %f145, %f145;
+ mul.f32 %f147, %f132, %f146;
+ mul.f32 %f148, %f147, %f147;
+ fma.rn.f32 %f151, %f299, %f148, %f300;
+ fma.rn.f32 %f153, %f151, %f148, %f301;
+ mul.rn.f32 %f154, %f153, %f148;
+ mul.rn.f32 %f155, %f154, %f147;
+ sub.f32 %f156, %f145, %f147;
+ neg.f32 %f157, %f147;
+ add.f32 %f158, %f156, %f156;
+ fma.rn.f32 %f159, %f157, %f145, %f158;
+ mul.rn.f32 %f160, %f132, %f159;
+ add.f32 %f161, %f155, %f147;
+ sub.f32 %f162, %f147, %f161;
+ add.f32 %f163, %f155, %f162;
+ add.f32 %f164, %f160, %f163;
+ add.f32 %f165, %f161, %f164;
+ sub.f32 %f166, %f161, %f165;
+ add.f32 %f167, %f164, %f166;
+ mul.rn.f32 %f169, %f144, %f302;
+ mul.rn.f32 %f171, %f144, %f303;
+ add.f32 %f172, %f169, %f165;
+ sub.f32 %f173, %f169, %f172;
+ add.f32 %f174, %f165, %f173;
+ add.f32 %f175, %f167, %f174;
+ add.f32 %f176, %f171, %f175;
+ add.f32 %f177, %f172, %f176;
+ sub.f32 %f178, %f172, %f177;
+ add.f32 %f179, %f176, %f178;
+ mul.rn.f32 %f181, %f101, %f177;
+ neg.f32 %f182, %f181;
+ fma.rn.f32 %f183, %f101, %f177, %f182;
+ fma.rn.f32 %f184, %f101, %f179, %f183;
+ fma.rn.f32 %f186, %f304, %f177, %f184;
+ add.rn.f32 %f187, %f181, %f186;
+ neg.f32 %f188, %f187;
+ add.rn.f32 %f189, %f181, %f188;
+ add.rn.f32 %f190, %f189, %f186;
+ mov.b32 %r22, %f187;
+ setp.eq.s32 %p23, %r22, 1118925336;
+ add.s32 %r23, %r22, -1;
+ mov.b32 %f191, %r23;
+ add.f32 %f192, %f190, 0f37000000;
+ selp.f32 %f193, %f191, %f187, %p23;
+ selp.f32 %f19, %f192, %f190, %p23;
+ mul.f32 %f194, %f193, 0f3FB8AA3B;
+ cvt.rzi.f32.f32 %f195, %f194;
+ fma.rn.f32 %f197, %f195, %f305, %f193;
+ fma.rn.f32 %f199, %f195, %f306, %f197;
+ mul.f32 %f200, %f199, 0f3FB8AA3B;
+ ex2.approx.ftz.f32 %f201, %f200;
+ add.f32 %f202, %f195, 0f00000000;
+ ex2.approx.f32 %f203, %f202;
+ mul.f32 %f204, %f201, %f203;
+ setp.lt.f32 %p24, %f193, 0fC2D20000;
+ selp.f32 %f205, 0f00000000, %f204, %p24;
+ setp.gt.f32 %p25, %f193, 0f42D20000;
+ selp.f32 %f320, 0f7F800000, %f205, %p25;
+ setp.eq.f32 %p26, %f320, 0f7F800000;
+ @%p26 bra BB0_15;
+
+ fma.rn.f32 %f320, %f320, %f19, %f320;
+
+BB0_15:
+ setp.lt.f32 %p27, %f44, 0f00000000;
+ and.pred %p2, %p27, %p12;
+ mov.b32 %r24, %f320;
+ xor.b32 %r25, %r24, -2147483648;
+ mov.b32 %f206, %r25;
+ selp.f32 %f322, %f206, %f320, %p2;
+ setp.eq.f32 %p29, %f44, 0f00000000;
+ @%p29 bra BB0_18;
+ bra.uni BB0_16;
+
+BB0_18:
+ add.f32 %f209, %f44, %f44;
+ selp.f32 %f322, %f209, 0f00000000, %p12;
+ bra.uni BB0_19;
+
+BB0_16:
+ setp.geu.f32 %p30, %f44, 0f00000000;
+ @%p30 bra BB0_19;
+
+ cvt.rzi.f32.f32 %f208, %f101;
+ setp.neu.f32 %p31, %f208, 0f400CCCCD;
+ selp.f32 %f322, 0f7FFFFFFF, %f322, %p31;
+
+BB0_19:
+ add.f32 %f210, %f18, 0f400CCCCD;
+ mov.b32 %r26, %f210;
+ setp.lt.s32 %p33, %r26, 2139095040;
+ @%p33 bra BB0_24;
+
+ setp.gtu.f32 %p34, %f18, 0f7F800000;
+ @%p34 bra BB0_23;
+ bra.uni BB0_21;
+
+BB0_23:
+ add.f32 %f322, %f44, 0f400CCCCD;
+ bra.uni BB0_24;
+
+BB0_21:
+ setp.neu.f32 %p35, %f18, 0f7F800000;
+ @%p35 bra BB0_24;
+
+ selp.f32 %f322, 0fFF800000, 0f7F800000, %p2;
+
+BB0_24:
+ mov.f32 %f314, 0fB5BFBE8E;
+ mov.f32 %f313, 0fBF317200;
+ mov.f32 %f312, 0f00000000;
+ mov.f32 %f311, 0f35BFBE8E;
+ mov.f32 %f310, 0f3F317200;
+ mov.f32 %f309, 0f3DAAAABD;
+ mov.f32 %f308, 0f3C4CAF63;
+ mov.f32 %f307, 0f3B18F0FE;
+ setp.eq.f32 %p36, %f44, 0f3F800000;
+ selp.f32 %f213, 0f3F800000, %f322, %p36;
+ cvt.sat.f32.f32 %f30, %f213;
+ abs.f32 %f31, %f45;
+ setp.lt.f32 %p37, %f31, 0f00800000;
+ mul.f32 %f214, %f31, 0f4B800000;
+ selp.f32 %f215, 0fC3170000, 0fC2FE0000, %p37;
+ selp.f32 %f216, %f214, %f31, %p37;
+ mov.b32 %r27, %f216;
+ and.b32 %r28, %r27, 8388607;
+ or.b32 %r29, %r28, 1065353216;
+ mov.b32 %f217, %r29;
+ shr.u32 %r30, %r27, 23;
+ cvt.rn.f32.u32 %f218, %r30;
+ add.f32 %f219, %f215, %f218;
+ setp.gt.f32 %p38, %f217, 0f3FB504F3;
+ mul.f32 %f220, %f217, 0f3F000000;
+ add.f32 %f221, %f219, 0f3F800000;
+ selp.f32 %f222, %f220, %f217, %p38;
+ selp.f32 %f223, %f221, %f219, %p38;
+ add.f32 %f224, %f222, 0fBF800000;
+ add.f32 %f212, %f222, 0f3F800000;
+ // inline asm
+ rcp.approx.ftz.f32 %f211,%f212;
+ // inline asm
+ add.f32 %f225, %f224, %f224;
+ mul.f32 %f226, %f211, %f225;
+ mul.f32 %f227, %f226, %f226;
+ fma.rn.f32 %f230, %f307, %f227, %f308;
+ fma.rn.f32 %f232, %f230, %f227, %f309;
+ mul.rn.f32 %f233, %f232, %f227;
+ mul.rn.f32 %f234, %f233, %f226;
+ sub.f32 %f235, %f224, %f226;
+ neg.f32 %f236, %f226;
+ add.f32 %f237, %f235, %f235;
+ fma.rn.f32 %f238, %f236, %f224, %f237;
+ mul.rn.f32 %f239, %f211, %f238;
+ add.f32 %f240, %f234, %f226;
+ sub.f32 %f241, %f226, %f240;
+ add.f32 %f242, %f234, %f241;
+ add.f32 %f243, %f239, %f242;
+ add.f32 %f244, %f240, %f243;
+ sub.f32 %f245, %f240, %f244;
+ add.f32 %f246, %f243, %f245;
+ mul.rn.f32 %f248, %f223, %f310;
+ mul.rn.f32 %f250, %f223, %f311;
+ add.f32 %f251, %f248, %f244;
+ sub.f32 %f252, %f248, %f251;
+ add.f32 %f253, %f244, %f252;
+ add.f32 %f254, %f246, %f253;
+ add.f32 %f255, %f250, %f254;
+ add.f32 %f256, %f251, %f255;
+ sub.f32 %f257, %f251, %f256;
+ add.f32 %f258, %f255, %f257;
+ mul.rn.f32 %f260, %f101, %f256;
+ neg.f32 %f261, %f260;
+ fma.rn.f32 %f262, %f101, %f256, %f261;
+ fma.rn.f32 %f263, %f101, %f258, %f262;
+ fma.rn.f32 %f265, %f312, %f256, %f263;
+ add.rn.f32 %f266, %f260, %f265;
+ neg.f32 %f267, %f266;
+ add.rn.f32 %f268, %f260, %f267;
+ add.rn.f32 %f269, %f268, %f265;
+ mov.b32 %r31, %f266;
+ setp.eq.s32 %p39, %r31, 1118925336;
+ add.s32 %r32, %r31, -1;
+ mov.b32 %f270, %r32;
+ add.f32 %f271, %f269, 0f37000000;
+ selp.f32 %f272, %f270, %f266, %p39;
+ selp.f32 %f32, %f271, %f269, %p39;
+ mul.f32 %f273, %f272, 0f3FB8AA3B;
+ cvt.rzi.f32.f32 %f274, %f273;
+ fma.rn.f32 %f276, %f274, %f313, %f272;
+ fma.rn.f32 %f278, %f274, %f314, %f276;
+ mul.f32 %f279, %f278, 0f3FB8AA3B;
+ ex2.approx.ftz.f32 %f280, %f279;
+ add.f32 %f281, %f274, 0f00000000;
+ ex2.approx.f32 %f282, %f281;
+ mul.f32 %f283, %f280, %f282;
+ setp.lt.f32 %p40, %f272, 0fC2D20000;
+ selp.f32 %f284, 0f00000000, %f283, %p40;
+ setp.gt.f32 %p41, %f272, 0f42D20000;
+ selp.f32 %f323, 0f7F800000, %f284, %p41;
+ setp.eq.f32 %p42, %f323, 0f7F800000;
+ @%p42 bra BB0_26;
+
+ fma.rn.f32 %f323, %f323, %f32, %f323;
+
+BB0_26:
+ setp.lt.f32 %p43, %f45, 0f00000000;
+ and.pred %p3, %p43, %p12;
+ mov.b32 %r33, %f323;
+ xor.b32 %r34, %r33, -2147483648;
+ mov.b32 %f285, %r34;
+ selp.f32 %f325, %f285, %f323, %p3;
+ setp.eq.f32 %p45, %f45, 0f00000000;
+ @%p45 bra BB0_29;
+ bra.uni BB0_27;
+
+BB0_29:
+ add.f32 %f288, %f45, %f45;
+ selp.f32 %f325, %f288, 0f00000000, %p12;
+ bra.uni BB0_30;
+
+BB0_27:
+ setp.geu.f32 %p46, %f45, 0f00000000;
+ @%p46 bra BB0_30;
+
+ cvt.rzi.f32.f32 %f287, %f101;
+ setp.neu.f32 %p47, %f287, 0f400CCCCD;
+ selp.f32 %f325, 0f7FFFFFFF, %f325, %p47;
+
+BB0_30:
+ add.f32 %f289, %f31, 0f400CCCCD;
+ mov.b32 %r35, %f289;
+ setp.lt.s32 %p49, %r35, 2139095040;
+ @%p49 bra BB0_35;
+
+ setp.gtu.f32 %p50, %f31, 0f7F800000;
+ @%p50 bra BB0_34;
+ bra.uni BB0_32;
+
+BB0_34:
+ add.f32 %f325, %f45, 0f400CCCCD;
+ bra.uni BB0_35;
+
+BB0_32:
+ setp.neu.f32 %p51, %f31, 0f7F800000;
+ @%p51 bra BB0_35;
+
+ selp.f32 %f325, 0fFF800000, 0f7F800000, %p3;
+
+BB0_35:
+ setp.eq.f32 %p52, %f45, 0f3F800000;
+ selp.f32 %f290, 0f3F800000, %f325, %p52;
+ cvt.sat.f32.f32 %f291, %f290;
+ cvt.sat.f32.f32 %f292, %f17;
+ mul.f32 %f293, %f292, 0f437F0000;
+ cvt.rzi.u32.f32 %r36, %f293;
+ cvt.sat.f32.f32 %f294, %f30;
+ mul.f32 %f295, %f294, 0f437F0000;
+ cvt.rzi.u32.f32 %r37, %f295;
+ cvt.sat.f32.f32 %f296, %f291;
+ mul.f32 %f297, %f296, 0f437F0000;
+ cvt.rzi.u32.f32 %r38, %f297;
+ ld.const.u64 %rd10, [cs+16];
+ cvta.to.global.u64 %rd11, %rd10;
+ shl.b64 %rd12, %rd1, 2;
+ add.s64 %rd13, %rd11, %rd12;
+ cvt.u16.u32 %rs5, %r38;
+ cvt.u16.u32 %rs6, %r37;
+ cvt.u16.u32 %rs7, %r36;
+ mov.u16 %rs8, 255;
+ st.global.v4.u8 [%rd13], {%rs7, %rs6, %rs5, %rs8};
+
+BB0_36:
+ ret;
+}
+
+